| repo (stringlengths 2-99) | file (stringlengths 13-225) | code (stringlengths 0-18.3M) | file_length (int64 0-18.3M) | avg_line_length (float64 0-1.36M) | max_line_length (int64 0-4.26M) | extension_type (stringclasses 1) |
---|---|---|---|---|---|---|
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/2_test_aerial_image.align_gt.measure.py | import sys
import os
import numpy as np
import test
# CHANGE to the path of your own read.py script:
sys.path.append("../../../data/AerialImageDataset")
import read
sys.path.append("../../utils")
import run_utils
import python_utils
import geo_utils
import polygon_utils
# --- Command-line FLAGS --- #
# --- --- #
# --- Params --- #
# CHANGE to your own test config file:
TEST_CONFIG_NAME = "config.test.aerial_image.align_gt"
PERFECT_GT_POLYGONS_DIRNAME = "manually_aligned_gt_polygons"
GT_POLYGONS_DIRNAME_LIST = [
"gt_polygons",
"aligned_gt_polygons",
"aligned_gt_polygons_1",
"aligned_gt_polygons_2",
"noisy_gt_polygons",
"aligned_noisy_gt_polygons",
"aligned_noisy_gt_polygons_1",
"aligned_noisy_gt_polygons_2",
]
THRESHOLDS = np.arange(0, 32.25, 0.25)
# --- --- #
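# Illustrative sketch (added for clarity, not the project's implementation): the THRESHOLDS above are
# expected to be distance thresholds in pixels, with the accuracy at threshold t being the fraction of
# polygon vertices lying within t pixels of their reference position. Assuming already-matched vertex
# arrays of identical shape, a minimal stand-in for test.measure_accuracies() could look like this:
def _illustrative_threshold_accuracies(gt_vertices, pred_vertices, thresholds):
    distances = np.linalg.norm(np.asarray(gt_vertices) - np.asarray(pred_vertices), axis=-1)
    return [float(np.mean(distances <= threshold)) for threshold in thresholds]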
def measure_image(dataset_raw_dirpath, image_info, perfect_gt_polygons_dirname, gt_polygons_dirname_list, thresholds, output_dir_stem):
accuracies_filename_format = "{}.accuracy.npy"
# --- Load shapefiles --- #
# CHANGE the arguments of the load_gt_data() function if using your own and it does not take the same arguments:
image_filepath = read.get_image_filepath(dataset_raw_dirpath, image_info["city"], image_info["number"])
polygons_filename_format = read.IMAGE_NAME_FORMAT + ".shp"
perfect_gt_polygons_filepath = read.get_polygons_filepath(dataset_raw_dirpath, perfect_gt_polygons_dirname,
image_info["city"], image_info["number"],
overwrite_polygons_filename_format=polygons_filename_format)
perfect_gt_polygons, _ = geo_utils.get_polygons_from_shapefile(image_filepath, perfect_gt_polygons_filepath)
if perfect_gt_polygons is None:
return None
perfect_gt_polygons = polygon_utils.orient_polygons(perfect_gt_polygons)
print("len(perfect_gt_polygons) = {}".format(len(perfect_gt_polygons)))
for gt_polygons_dirname in gt_polygons_dirname_list:
gt_polygons = read.load_polygons(dataset_raw_dirpath, gt_polygons_dirname, image_info["city"],
image_info["number"])
if gt_polygons is None:
break
gt_polygons = polygon_utils.orient_polygons(gt_polygons)
# CHANGE the arguments of the IMAGE_NAME_FORMAT format string if using your own and it does not take the same arguments:
image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
# --- Measure accuracies --- #
output_dir = output_dir_stem + "." + gt_polygons_dirname
if not os.path.exists(output_dir):
os.makedirs(output_dir)
accuracies_filename = accuracies_filename_format.format(image_name)
accuracies_filepath = os.path.join(output_dir, accuracies_filename)
accuracies = test.measure_accuracies(perfect_gt_polygons, gt_polygons, thresholds, accuracies_filepath)
print(accuracies)
def main():
# load config file
config_test = run_utils.load_config(TEST_CONFIG_NAME)
# # Handle FLAGS
# if FLAGS.batch_size is not None:
# batch_size = FLAGS.batch_size
# else:
# batch_size = config_test["batch_size"]
# print("#--- Used params: ---#")
# print("batch_size: {}".format(FLAGS.batch_size))
# Find data_dir
data_dir = python_utils.choose_first_existing_path(config_test["data_dir_candidates"])
if data_dir is None:
print("ERROR: Data directory not found!")
exit()
else:
print("Using data from {}".format(data_dir))
dataset_raw_dirpath = os.path.join(data_dir, config_test["dataset_raw_partial_dirpath"])
output_dir_stem = config_test["align_dir"]
for images_info in config_test["images_info_list"]:
for number in images_info["numbers"]:
image_info = {
"city": images_info["city"],
"number": number,
}
measure_image(dataset_raw_dirpath, image_info,
PERFECT_GT_POLYGONS_DIRNAME, GT_POLYGONS_DIRNAME_LIST, THRESHOLDS, output_dir_stem)
if __name__ == '__main__':
main()
| 4,212 | 34.70339 | 135 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/model.py | import sys
import os
import time
import tensorflow as tf
import numpy as np
from tqdm import tqdm
import model_utils
# import model_utils_concat_interm_outputs
import loss_utils
sys.path.append("../evaluate_funcs") # Evaluation functions
import evaluate_utils
sys.path.append("../utils") # Mapalign utils
import visualization
sys.path.append("../../utils") # All project utils
import python_utils
import polygon_utils
import tf_utils
import image_utils
import print_utils
class MapAlignModel:
def __init__(self, model_name, input_res,
add_image_input, image_channel_count,
image_feature_base_count,
add_poly_map_input, poly_map_channel_count,
poly_map_feature_base_count,
common_feature_base_count, pool_count,
add_disp_output, disp_channel_count,
add_seg_output, seg_channel_count,
output_res,
batch_size,
loss_params,
level_loss_coefs_params,
learning_rate_params,
weight_decay,
image_dynamic_range, disp_map_dynamic_range_fac,
disp_max_abs_value):
"""
Methods that may need a re-write if changing this class's code:
- get_input_res
- get_output_res
:param model_name:
:param input_res:
:param add_image_input:
:param image_channel_count:
:param image_feature_base_count:
:param add_poly_map_input:
:param poly_map_channel_count:
:param poly_map_feature_base_count:
:param common_feature_base_count:
:param pool_count:
:param add_disp_output:
:param disp_channel_count:
:param add_seg_output:
:param seg_channel_count:
:param output_res:
:param batch_size:
:param loss_params:
:param level_loss_coefs_params:
:param learning_rate_params:
:param weight_decay:
:param image_dynamic_range:
:param disp_map_dynamic_range_fac:
:param disp_max_abs_value:
"""
assert type(model_name) == str, "model_name should be a string, not a {}".format(type(model_name))
assert type(input_res) == int, "input_res should be an int, not a {}".format(type(input_res))
assert type(add_image_input) == bool, "add_image_input should be a bool, not a {}".format(type(add_image_input))
assert type(image_channel_count) == int, "image_channel_count should be an int, not a {}".format(
type(image_channel_count))
assert type(image_feature_base_count) == int, "image_feature_base_count should be an int, not a {}".format(
type(image_feature_base_count))
assert type(add_poly_map_input) == bool, "add_poly_map_input should be a bool, not a {}".format(
type(add_poly_map_input))
assert type(poly_map_channel_count) == int, "poly_map_channel_count should be an int, not a {}".format(
type(poly_map_channel_count))
assert type(
poly_map_feature_base_count) == int, "poly_map_feature_base_count should be an int, not a {}".format(
type(poly_map_feature_base_count))
assert type(common_feature_base_count) == int, "common_feature_base_count should be an int, not a {}".format(
type(common_feature_base_count))
assert type(pool_count) == int, "pool_count should be an int, not a {}".format(type(pool_count))
assert type(add_disp_output) == bool, "add_disp_output should be a bool, not a {}".format(type(add_disp_output))
assert type(disp_channel_count) == int, "disp_channel_count should be an int, not a {}".format(
type(disp_channel_count))
assert type(add_seg_output) == bool, "add_seg_output should be a bool, not a {}".format(type(add_seg_output))
assert type(seg_channel_count) == int, "seg_channel_count should be an int, not a {}".format(
type(seg_channel_count))
assert type(output_res) == int, "output_res should be an int, not a {}".format(type(output_res))
assert type(batch_size) == int, "batch_size should be an int, not a {}".format(type(batch_size))
assert type(loss_params) == dict, "loss_params should be a dict, not a {}".format(type(loss_params))
assert type(level_loss_coefs_params) == list, "level_loss_coefs_params should be a list, not a {}".format(
type(level_loss_coefs_params))
assert type(learning_rate_params) == dict, "learning_rate_params should be a dict, not a {}".format(
type(learning_rate_params))
assert type(weight_decay) == float, "weight_decay should be a float, not a {}".format(type(weight_decay))
        assert type(image_dynamic_range) == list, "image_dynamic_range should be a list, not a {}".format(
            type(image_dynamic_range))
assert type(
disp_map_dynamic_range_fac) == float, "disp_map_dynamic_range_fac should be a float, not a {}".format(
type(disp_map_dynamic_range_fac))
assert type(disp_max_abs_value) == float or type(
disp_max_abs_value) == int, "disp_max_abs_value should be a number, not a {}".format(
type(disp_max_abs_value))
# Re-init Tensorflow
self.init_tf()
# Init attributes from arguments
self.model_name = model_name
self.input_res = input_res
self.add_image_input = add_image_input
self.image_channel_count = image_channel_count
self.image_feature_base_count = image_feature_base_count
self.add_poly_map_input = add_poly_map_input
self.poly_map_channel_count = poly_map_channel_count
self.poly_map_feature_base_count = poly_map_feature_base_count
self.common_feature_base_count = common_feature_base_count
self.pool_count = pool_count
# Check if input_res is high enough:
min_input_res = self.get_min_input_res(self.pool_count)
if self.input_res < min_input_res:
raise ValueError("WARNING: the given input_res = {} is too small. "
"The model can handle images of resolution {} minimum. Aborting..."
.format(self.input_res, min_input_res))
self.add_disp_output = add_disp_output
self.disp_channel_count = disp_channel_count
self.add_seg_output = add_seg_output
self.seg_channel_count = seg_channel_count
self.output_res = output_res
self.batch_size = batch_size
self.weight_decay = weight_decay
self.image_dynamic_range = image_dynamic_range
self.disp_map_dynamic_range_fac = disp_map_dynamic_range_fac
self.disp_max_abs_value = disp_max_abs_value
# Create placeholders
self.input_image, \
self.input_disp_polygon_map, \
self.gt_disp_field_map, \
self.gt_seg, \
self.gt_polygons, \
self.disp_polygons = self.create_placeholders()
# --- Create model --- #
# # concat_interm_outputs:
# self.level_0_disp_pred, \
# self.stacked_disp_preds, \
# self.level_0_seg_pred, \
# self.stacked_seg_pred_logits, \
# self.keep_prob = model_utils_concat_interm_outputs.build_double_unet(self.input_image,
# self.input_disp_polygon_map,
# self.image_feature_base_count,
# self.poly_map_feature_base_count,
# self.common_feature_base_count,
# self.pool_count,
# self.disp_channel_count,
# add_seg_output=self.add_seg_output,
# seg_channel_count=self.seg_channel_count,
# weight_decay=self.weight_decay)
# # Old way:
# self.level_0_disp_pred, \
# self.stacked_disp_preds, \
# self.level_0_seg_pred, \
# self.stacked_seg_pred_logits, \
# self.keep_prob = model_utils.build_double_unet(self.input_image, self.input_disp_polygon_map,
# self.image_feature_base_count,
# self.poly_map_feature_base_count,
# self.common_feature_base_count, self.pool_count,
# self.disp_channel_count,
# add_seg_output=self.add_seg_output,
# seg_channel_count=self.seg_channel_count,
# weight_decay=self.weight_decay)
# New way:
input_branch_params_list = []
if self.add_image_input:
input_branch_params_list.append({
"tensor": self.input_image,
"name": "image",
"feature_base_count": self.image_feature_base_count,
})
if self.add_poly_map_input:
input_branch_params_list.append({
"tensor": self.input_disp_polygon_map,
"name": "poly_map",
"feature_base_count": self.poly_map_feature_base_count,
})
output_branch_params_list = []
if self.add_disp_output:
output_branch_params_list.append({
"feature_base_count": self.common_feature_base_count,
"channel_count": self.disp_channel_count,
"activation": tf.nn.tanh,
"name": "disp",
})
if self.add_seg_output:
output_branch_params_list.append({
"feature_base_count": self.common_feature_base_count,
"channel_count": self.seg_channel_count,
"activation": tf.identity,
"name": "seg",
})
outputs, self.keep_prob = model_utils.build_multibranch_unet(input_branch_params_list, self.pool_count,
self.common_feature_base_count,
output_branch_params_list,
weight_decay=self.weight_decay)
if self.add_disp_output:
index = 0
_, self.stacked_disp_preds, self.level_0_disp_pred = outputs[index]
else:
self.stacked_disp_preds = self.level_0_disp_pred = None
if self.add_seg_output:
index = self.add_disp_output # 0 if there is no disp_output, 1 if there is
self.stacked_seg_pred_logits, _, self.level_0_seg_pred = outputs[index]
# # --- Add polygonization module --- #
# print_utils.print_info(" --- Add polygonization module: --- #")
# polygonization_utils.build_polygonization_module(self.level_0_seg_pred)
# print_utils.print_info(" --- --- #")
else:
self.stacked_seg_pred_logits = self.level_0_seg_pred = None
# --- --- #
# Create training attributes
self.global_step = self.create_global_step()
self.learning_rate = self.build_learning_rate(learning_rate_params)
# Create level_coefs tensor
self.level_loss_coefs = self.build_level_coefs(level_loss_coefs_params)
# Build losses
self.total_loss = self.build_losses(loss_params)
# # Build evaluator
# self.aligned_disp_polygons_batch, self.threshold_accuracies = self.build_evaluator()
# Create optimizer
self.train_step = self.build_optimizer()
# Compute gradient ops
self.grad_x_op = None
self.grad_y_op = None
@staticmethod
def init_tf():
tf.reset_default_graph()
def create_placeholders(self):
input_image = tf.placeholder(tf.float32, [self.batch_size, self.input_res, self.input_res,
self.image_channel_count])
input_disp_polygon_map = tf.placeholder(tf.float32, [self.batch_size, self.input_res,
self.input_res,
self.poly_map_channel_count])
gt_disp_field_map = tf.placeholder(tf.float32, [self.batch_size, self.output_res, self.output_res,
self.disp_channel_count])
gt_seg = tf.placeholder(tf.float32, [self.batch_size, self.input_res, self.input_res,
self.poly_map_channel_count])
gt_polygons = tf.placeholder(tf.float32, [self.batch_size, None, None, 2])
disp_polygons = tf.placeholder(tf.float32, [self.batch_size, None, None, 2])
return input_image, input_disp_polygon_map, gt_disp_field_map, gt_seg, gt_polygons, disp_polygons
@staticmethod
def create_global_step():
return tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')
def build_learning_rate(self, learning_rate_params):
return tf.train.piecewise_constant(self.global_step, learning_rate_params["boundaries"],
learning_rate_params["values"])
def build_level_coefs(self, level_loss_coefs_params):
with tf.name_scope('level_coefs'):
level_loss_coefs_list = []
for level_index, level_coef_params in enumerate(level_loss_coefs_params):
level_loss_coef = tf.train.piecewise_constant(self.global_step,
level_coef_params["boundaries"],
level_coef_params["values"],
name="{}".format(level_index))
tf.summary.scalar("{}".format(level_index), level_loss_coef)
level_loss_coefs_list.append(level_loss_coef)
level_loss_coefs = tf.stack(level_loss_coefs_list)
return level_loss_coefs
def build_losses(self, loss_params):
with tf.name_scope('losses'):
if self.add_disp_output:
# Displacement loss
displacement_error = loss_utils.displacement_error(self.gt_disp_field_map,
self.stacked_disp_preds,
self.level_loss_coefs,
self.input_disp_polygon_map,
loss_params["disp"])
tf.summary.scalar('displacement_error', displacement_error)
weighted_displacement_error = loss_params["disp"]["coef"] * displacement_error
tf.summary.scalar('weighted_displacement_error', weighted_displacement_error)
tf.add_to_collection('losses', weighted_displacement_error)
# Laplacian penalty
laplacian_penalty = loss_utils.laplacian_penalty(self.stacked_disp_preds,
self.level_loss_coefs)
tf.summary.scalar('laplacian_penalty', laplacian_penalty)
weighted_laplacian_penalty = loss_params["laplacian_penalty_coef"] * laplacian_penalty
tf.summary.scalar('weighted_laplacian_penalty', weighted_laplacian_penalty)
tf.add_to_collection('losses', weighted_laplacian_penalty)
if self.add_seg_output:
# Segmentation loss
segmentation_error = loss_utils.segmentation_error(self.gt_seg,
self.stacked_seg_pred_logits,
self.level_loss_coefs,
loss_params["seg"])
tf.summary.scalar('segmentation_error', segmentation_error)
weighted_segmentation_error = loss_params["seg"]["coef"] * segmentation_error
tf.summary.scalar('weighted_segmentation_error', weighted_segmentation_error)
tf.add_to_collection('losses', weighted_segmentation_error)
            # Add up all losses (objective losses + weight decay losses for now)
total_loss = tf.add_n(tf.get_collection('losses'), name='total_loss')
tf.summary.scalar('total_loss', total_loss)
with tf.name_scope('losses_baseline'):
if self.add_disp_output:
# Baseline displacement loss
baseline_stacked_disp_preds = tf.zeros_like(self.stacked_disp_preds)
baseline_displacement_error = loss_utils.displacement_error(self.gt_disp_field_map,
baseline_stacked_disp_preds,
self.level_loss_coefs,
self.input_disp_polygon_map,
loss_params["disp"])
tf.summary.scalar('baseline_displacement_error', baseline_displacement_error)
return total_loss
# def build_evaluator(self):
# thresholds = np.arange(0, 8.0, 0.5)
# disp_max_abs_value = self.disp_max_abs_value
#
# def evaluate(pred_disp_field_map_batch, disp_polygons_batch, gt_polygons_batch):
# # val_gt_disp_field_map_batch *= 2*DISP_MAX_ABS_VALUE # Denormalize
# # val_aligned_disp_polygons_batch = polygon_utils.apply_batch_disp_map_to_polygons(
# # val_gt_disp_field_map_batch, val_disp_polygons_batch)
# pred_disp_field_map_batch *= 2 * disp_max_abs_value # Denormalize
# aligned_disp_polygons_batch = polygon_utils.apply_batch_disp_map_to_polygons(
# pred_disp_field_map_batch, disp_polygons_batch)
# threshold_accuracies = evaluate_utils.compute_threshold_accuracies(gt_polygons_batch,
# aligned_disp_polygons_batch,
# thresholds) # TODO: add padding information to filter out vertices outside output image
# aligned_disp_polygons_batch = aligned_disp_polygons_batch.astype(np.float32)
# threshold_accuracies = np.array(threshold_accuracies).astype(np.float32)
# return aligned_disp_polygons_batch, threshold_accuracies
#
# with tf.name_scope('evaluator'):
# aligned_disp_polygons_batch, threshold_accuracies = tf.py_func(
# evaluate,
# [self.level_0_disp_pred, self.disp_polygons, self.gt_polygons],
# Tout=(tf.float32, tf.float32),
# name="evaluator"
# )
#
# threshold_accuracies.set_shape((1, len(thresholds)))
#
# # tf.summary.scalar('accuracy with threshold 1', threshold_accuracies[0])
# # # tf.summary.scalar('accuracy with threshold 2', threshold_accuracy_2)
# # # tf.summary.scalar('accuracy with threshold 3', threshold_accuracy_3)
# # # tf.summary.scalar('accuracy with threshold 4', threshold_accuracy_4)
# # # tf.summary.scalar('accuracy with threshold 5', threshold_accuracy_5)
# # # tf.summary.scalar('accuracy with threshold 6', threshold_accuracy_6)
# # # tf.summary.scalar('accuracy with threshold 7', threshold_accuracy_7)
# # # tf.summary.scalar('accuracy with threshold 8', threshold_accuracy_8)
#
# return aligned_disp_polygons_batch, threshold_accuracies
def build_optimizer(self):
with tf.name_scope('adam_optimizer'):
optimizer = tf.train.AdamOptimizer(self.learning_rate)
train_step = optimizer.minimize(self.total_loss, global_step=self.global_step)
current_adam_lr = tf_utils.compute_current_adam_lr(optimizer)
tf.summary.scalar('lr', current_adam_lr)
return train_step
def train(self, sess, dataset_tensors, dropout_keep_prob, with_summaries=False, merged_summaries=None,
summaries_writer=None, summary_index=None, plot=False):
"""
:param sess:
:param with_summaries: (Default: False)
:param merged_summaries: Must be not None if with_summaries is True
:param summaries_writer: Must be not None if with_summaries is True
:return:
"""
if with_summaries:
assert merged_summaries is not None and summaries_writer is not None, \
"merged_summaries and writer should be specified if with_summaries is True"
train_image, \
_, \
_, \
train_gt_polygon_map, \
train_gt_disp_field_map, \
train_disp_polygon_map = dataset_tensors
train_image_batch, train_gt_polygon_map_batch, train_gt_disp_field_map_batch, train_disp_polygon_map_batch = sess.run(
[train_image, train_gt_polygon_map, train_gt_disp_field_map, train_disp_polygon_map])
feed_dict = {
self.input_image: train_image_batch,
self.input_disp_polygon_map: train_disp_polygon_map_batch,
self.gt_disp_field_map: train_gt_disp_field_map_batch,
self.gt_seg: train_gt_polygon_map_batch,
self.gt_polygons: tf_utils.create_array_to_feed_placeholder(self.gt_polygons),
self.disp_polygons: tf_utils.create_array_to_feed_placeholder(self.disp_polygons),
self.keep_prob: dropout_keep_prob,
}
if with_summaries:
if summary_index == 0:
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
else:
run_options = run_metadata = None
input_list = [merged_summaries, self.train_step, self.total_loss]
if self.add_disp_output:
input_list.append(self.level_0_disp_pred)
if self.add_seg_output:
input_list.append(self.level_0_seg_pred)
output_list = sess.run(input_list, feed_dict=feed_dict, options=run_options, run_metadata=run_metadata)
extra_output_count = self.add_disp_output + self.add_seg_output
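            # Python bools add up as 0/1 here, so this counts how many prediction tensors were appended to input_list.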
train_summary, _, train_loss = output_list[:-extra_output_count]
train_pred_disp_field_map_batch = train_pred_seg_batch = None
if self.add_disp_output:
index = -extra_output_count
train_pred_disp_field_map_batch = output_list[index]
if self.add_seg_output:
index = -extra_output_count + self.add_disp_output
train_pred_seg_batch = output_list[index]
# TODO: If uncommenting below code, also add relevant code to the "else" block below
# train_summary, _, train_loss, train_pred_disp_field_map_batch = sess.run(
# [merged_summaries, train_step, total_loss, pred_disp_field_map],
# feed_dict={input_image: train_gt_polygon_map_batch, input_disp_polygon_map: train_disp_polygon_map_batch,
# gt_disp_field_map: train_gt_disp_field_map_batch,
# keep_prob: DROPOUT_KEEP_PROB,
# mode_training: True}, options=run_options, run_metadata=run_metadata)
summaries_writer.add_summary(train_summary, summary_index)
if summary_index == 0:
summaries_writer.add_run_metadata(run_metadata, 'step%03d' % summary_index)
print_utils.print_info("step {}, training loss = {}".format(summary_index, train_loss))
if plot:
train_image_batch = (train_image_batch - self.image_dynamic_range[0]) / (
self.image_dynamic_range[1] - self.image_dynamic_range[0])
# train_gt_disp_field_map_batch = train_gt_disp_field_map_batch * 2 # Within [-1, 1]
# train_gt_disp_field_map_batch = train_gt_disp_field_map_batch * self.disp_max_abs_value # Within [-disp_max_abs_value, disp_max_abs_value]
# train_pred_disp_field_map_batch = train_pred_disp_field_map_batch * 2 # Within [-1, 1]
# train_pred_disp_field_map_batch = train_pred_disp_field_map_batch * self.disp_max_abs_value # Within [-disp_max_abs_value, disp_max_abs_value]
# visualization.plot_batch(["Training gt disp", "Training pred disp"], train_image_batch,
# train_gt_polygon_map_batch,
# [train_gt_disp_field_map_batch, train_pred_disp_field_map_batch],
# train_disp_polygon_map_batch)
if self.add_seg_output:
visualization.plot_batch_seg("Training pred seg", train_image_batch, train_pred_seg_batch)
return train_image_batch, train_gt_polygon_map_batch, train_gt_disp_field_map_batch, train_disp_polygon_map_batch, train_pred_disp_field_map_batch, train_pred_seg_batch
else:
_ = sess.run([self.train_step], feed_dict=feed_dict)
return train_image_batch, train_gt_polygon_map_batch, train_gt_disp_field_map_batch, train_disp_polygon_map_batch, None, None
def validate(self, sess, dataset_tensors, merged_summaries, summaries_writer, summary_index, plot=False):
val_image, \
val_gt_polygons, \
val_disp_polygons, \
val_gt_polygon_map, \
val_gt_disp_field_map, \
val_disp_polygon_map = dataset_tensors
val_image_batch, val_gt_polygons_batch, val_disp_polygons_batch, val_gt_polygon_map_batch, val_gt_disp_field_map_batch, val_disp_polygon_map_batch = sess.run(
[val_image, val_gt_polygons, val_disp_polygons, val_gt_polygon_map, val_gt_disp_field_map,
val_disp_polygon_map])
feed_dict = {
self.input_image: val_image_batch,
self.input_disp_polygon_map: val_disp_polygon_map_batch,
self.gt_disp_field_map: val_gt_disp_field_map_batch,
self.gt_seg: val_gt_polygon_map_batch,
self.gt_polygons: val_gt_polygons_batch,
self.disp_polygons: val_disp_polygons_batch,
self.keep_prob: 1.0
}
input_list = [merged_summaries, self.total_loss]
if self.add_disp_output:
input_list.append(self.level_0_disp_pred)
if self.add_seg_output:
input_list.append(self.level_0_seg_pred)
output_list = sess.run(input_list, feed_dict=feed_dict)
extra_output_count = self.add_disp_output + self.add_seg_output
val_summary, val_loss, = output_list[:-extra_output_count]
val_pred_disp_field_map_batch = val_pred_seg_batch = None
if self.add_disp_output:
index = -extra_output_count
val_pred_disp_field_map_batch = output_list[index]
if self.add_seg_output:
index = -extra_output_count + self.add_disp_output
val_pred_seg_batch = output_list[index]
if plot:
val_image_batch = (val_image_batch - self.image_dynamic_range[0]) / (
self.image_dynamic_range[1] - self.image_dynamic_range[0])
# visualization.plot_batch_polygons("Validation plot", val_image_batch, val_gt_polygons_batch,
# val_disp_polygons_batch, val_aligned_disp_polygons_batch)
if self.add_seg_output:
visualization.plot_batch_seg("Validation pred seg", val_image_batch, val_pred_seg_batch)
summaries_writer.add_summary(val_summary, summary_index)
print_utils.print_info("step {}, validation loss = {}".format(summary_index, val_loss))
# print("\t validation threshold accuracies = {}".format(val_threshold_accuracies))
return val_image_batch, val_gt_polygons_batch, val_disp_polygons_batch, val_gt_polygon_map_batch, val_gt_disp_field_map_batch, val_disp_polygon_map_batch, val_pred_disp_field_map_batch, val_pred_seg_batch
def restore_checkpoint(self, sess, saver, checkpoints_dir):
"""
:param sess:
:param saver:
:param checkpoints_dir:
:return: True if a checkpoint was found and restored, False if no checkpoint was found
"""
checkpoint = tf.train.get_checkpoint_state(checkpoints_dir)
if checkpoint and checkpoint.model_checkpoint_path: # Check if the model has a checkpoint
print_utils.print_info(
"Restoring {} checkpoint {}".format(self.model_name, checkpoint.model_checkpoint_path))
try:
saver.restore(sess, checkpoint.model_checkpoint_path)
except tf.errors.InvalidArgumentError:
print_utils.print_error("ERROR: could not load checkpoint.\n"
"\tThis is likely due to: .\n"
"\t\t - the model graph definition has changed from the checkpoint thus weights do not match\n"
.format(checkpoints_dir)
)
exit()
return True
else:
return False
# def get_weight_variables(self, starts_with):
# """
#
# :return: A filtered list of all trainable variables whose names start with starts_with.
# """
# trainable_variables = tf.trainable_variables()
# weight_variables = []
# for var in trainable_variables:
# if var.name.startswith(starts_with):
# weight_variables.append(var)
# return weight_variables
def optimize(self, train_dataset_tensors, val_dataset_tensors,
max_iter, dropout_keep_prob,
logs_dir, train_summary_step, val_summary_step,
checkpoints_dir, checkpoint_step,
init_checkpoints_dirpath=None,
plot_results=False):
"""
:param train_dataset_tensors:
:param val_dataset_tensors: (If None: do not perform validation step)
:param max_iter:
:param dropout_keep_prob:
:param logs_dir:
:param train_summary_step:
:param val_summary_step:
:param checkpoints_dir: Directory to save checkpoints. If this is not the first time launching the optimization,
        the weights will be restored from the last checkpoint in that directory
:param checkpoint_step:
:param init_checkpoints_dirpath: If this is the first time launching the optimization, the weights will be
initialized with the last checkpoint in init_checkpoints_dirpath (optional)
:param plot_results: (optional)
:return:
"""
# Summaries
merged_summaries = tf.summary.merge_all()
train_writer = tf.summary.FileWriter(os.path.join(logs_dir, "train"), tf.get_default_graph())
val_writer = tf.summary.FileWriter(os.path.join(logs_dir, "val"), tf.get_default_graph())
# Savers
saver = tf.train.Saver(save_relative_paths=True)
# The op for initializing the variables.
init_op = tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())
with tf.Session() as sess:
sess.run(init_op)
# Restore checkpoint if one exists
restore_checkpoint_success = self.restore_checkpoint(sess, saver, checkpoints_dir)
if not restore_checkpoint_success and init_checkpoints_dirpath is not None:
# This is the first time launching this optimization.
# Create saver with only trainable variables:
init_variables_saver = tf.train.Saver(tf.trainable_variables())
# Restore from init_checkpoints_dirpath if it exists:
restore_checkpoint_success = self.restore_checkpoint(sess, init_variables_saver,
init_checkpoints_dirpath)
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
if plot_results:
visualization.init_figures(["Training gt disp", "Training pred disp", "Training pred seg",
"Training polygonization",
"Validation plot", "Validation pred seg"])
print("Model has {} trainable variables".format(
tf_utils.count_number_trainable_params())
)
i = tf.train.global_step(sess, self.global_step)
while i <= max_iter:
if i % train_summary_step == 0:
time_start = time.time()
train_image_batch, \
train_gt_polygon_map_batch, \
train_gt_disp_field_map_batch, \
train_disp_polygon_map_batch, \
train_pred_disp_field_map_batch, \
train_pred_seg_batch = self.train(sess, train_dataset_tensors, dropout_keep_prob,
with_summaries=True, merged_summaries=merged_summaries,
summaries_writer=train_writer, summary_index=i, plot=plot_results)
time_end = time.time()
print("\tIteration done in {}s".format(time_end - time_start))
else:
self.train(sess, train_dataset_tensors, dropout_keep_prob)
if val_dataset_tensors is not None:
# i += 1
# Measure validation loss and accuracy
if i % val_summary_step == 1:
val_image_batch, \
val_gt_polygons_batch, \
val_disp_polygons_batch, \
val_gt_polygon_map_batch, \
val_gt_disp_field_map_batch, \
val_disp_polygon_map_batch, \
val_pred_disp_field_map_batch, val_pred_seg_batch = self.validate(sess, val_dataset_tensors,
merged_summaries, val_writer,
i,
plot=plot_results)
# Save checkpoint
if i % checkpoint_step == (checkpoint_step - 1):
saver.save(sess, os.path.join(checkpoints_dir, self.model_name),
global_step=self.global_step)
i = tf.train.global_step(sess, self.global_step)
coord.request_stop()
coord.join(threads)
train_writer.close()
val_writer.close()
    def make_batches_patch_boundingboxes(self, patch_boundingboxes, batch_size):
        batches_patch_boundingboxes = []
        batch_patch_boundingboxes = []
        for patch_boundingbox in patch_boundingboxes:
            batch_patch_boundingboxes.append(patch_boundingbox)
            if len(batch_patch_boundingboxes) == batch_size:
                batches_patch_boundingboxes.append(batch_patch_boundingboxes)
                batch_patch_boundingboxes = []
        if batch_patch_boundingboxes:
            # Keep the last, possibly incomplete, batch
            batches_patch_boundingboxes.append(batch_patch_boundingboxes)
        return batches_patch_boundingboxes
def inference(self, image_array, ori_gt_array, checkpoints_dir):
"""
Runs inference on image_array and ori_gt_array with model checkpoint in checkpoints_dir
:param image_array:
:param ori_gt_array:
:param checkpoints_dir:
:return:
"""
spatial_shape = image_array.shape[:2]
if spatial_shape[0] < self.input_res or spatial_shape[1] < self.input_res:
raise ValueError("WARNING: image patch should have spatial shape ({}, {}) instead of {}. "
"Adapt patch size accordingly."
.format(self.input_res, self.input_res, spatial_shape))
# Format inputs
image_array = image_array[:, :, :3] # Remove alpha channel if any
image_array = (image_array / 255) * (self.image_dynamic_range[1] - self.image_dynamic_range[0]) + \
self.image_dynamic_range[0]
ori_gt_array = ori_gt_array / 255
padding = (self.input_res - self.output_res) // 2
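        # The no-padding network predicts a smaller resolution than its input; "padding" is the per-side crop between the two.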
# Init displacement field and segmentation image
complete_pred_field_map = np.zeros(
(spatial_shape[0] - 2 * padding, spatial_shape[1] - 2 * padding, self.disp_channel_count))
complete_segmentation_image = np.zeros(
(spatial_shape[0] - 2 * padding, spatial_shape[1] - 2 * padding, self.seg_channel_count))
# visualization.init_figures(["example"])
# Iterate over every patch and predict displacement field for this patch
patch_boundingboxes = image_utils.compute_patch_boundingboxes(spatial_shape,
stride=self.output_res,
patch_res=self.input_res)
batch_boundingboxes_list = list(
python_utils.split_list_into_chunks(patch_boundingboxes, self.batch_size, pad=True))
# Saver
saver = tf.train.Saver(save_relative_paths=True)
with tf.Session() as sess:
# Restore checkpoint
restore_checkpoint_success = self.restore_checkpoint(sess, saver, checkpoints_dir)
if not restore_checkpoint_success:
sys.exit('No checkpoint found in {}'.format(checkpoints_dir))
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
# Loop over every batch
for batch_index, batch_boundingboxes in enumerate(batch_boundingboxes_list):
if batch_index % 10 == 0:
print("Processing batch {}/{}"
.format(batch_index + 1, len(batch_boundingboxes_list)))
# Form batch
batch_image_list = []
batch_ori_gt_list = []
for boundingbox in batch_boundingboxes:
patch_image = image_array[boundingbox[0]:boundingbox[2],
boundingbox[1]:boundingbox[3], :]
patch_ori_gt = ori_gt_array[boundingbox[0]:boundingbox[2],
boundingbox[1]:boundingbox[3], :]
batch_image_list.append(patch_image)
batch_ori_gt_list.append(patch_ori_gt)
batch_image = np.stack(batch_image_list, axis=0)
batch_ori_gt = np.stack(batch_ori_gt_list, axis=0)
if self.add_seg_output:
batch_pred_disp_field_map, batch_pred_seg = sess.run(
[self.level_0_disp_pred, self.level_0_seg_pred], feed_dict={
self.input_image: batch_image,
self.input_disp_polygon_map: batch_ori_gt,
self.keep_prob: 1.0
})
else:
batch_pred_disp_field_map = sess.run(
self.level_0_disp_pred, feed_dict={
self.input_image: batch_image,
self.input_disp_polygon_map: batch_ori_gt,
self.keep_prob: 1.0
})
batch_pred_seg = np.zeros((batch_pred_disp_field_map.shape[0], batch_pred_disp_field_map.shape[1],
batch_pred_disp_field_map.shape[2], self.seg_channel_count))
# Fill complete outputs
for batch_index, boundingbox in enumerate(batch_boundingboxes):
patch_pred_disp_field_map = batch_pred_disp_field_map[batch_index]
patch_pred_seg = batch_pred_seg[batch_index]
# print("--- patch_pred_seg: ---")
# print(patch_pred_seg[:, :, 0])
# print("---")
# print(patch_pred_seg[:, :, 1])
# print("---")
# print(patch_pred_seg[:, :, 2])
# print("---")
# print(patch_pred_seg[:, :, 3])
# print("---")
# # visualization.init_figures(["example", "example 2"])
# visualization.init_figures(["example"])
# patch_image = image_array[boundingbox[0]:boundingbox[2],
# boundingbox[1]:boundingbox[3], :]
# patch_image = (patch_image - self.image_dynamic_range[0]) / (
# self.image_dynamic_range[1] - self.image_dynamic_range[0])
# visualization.plot_seg("example", patch_image, patch_pred_seg)
padded_boundingbox = image_utils.padded_boundingbox(boundingbox, padding)
translated_padded_boundingbox = [x - padding for x in padded_boundingbox]
complete_pred_field_map[
translated_padded_boundingbox[0]:translated_padded_boundingbox[2],
translated_padded_boundingbox[1]:translated_padded_boundingbox[3], :] = patch_pred_disp_field_map
complete_segmentation_image[
translated_padded_boundingbox[0]:translated_padded_boundingbox[2],
translated_padded_boundingbox[1]:translated_padded_boundingbox[3],
:] = patch_pred_seg
# visualization.plot_seg("example 2", patch_image, complete_segmentation_image[
# translated_padded_boundingbox[0]:translated_padded_boundingbox[2],
# translated_padded_boundingbox[1]:translated_padded_boundingbox[3],
# :])
# visualization.plot_example("example",
# patch_image[0],
# patch_ori_gt[0],
# patch_pred_disp_field_map[0],
# patch_ori_gt[0])
coord.request_stop()
coord.join(threads)
# De-normalize field map
complete_pred_field_map = complete_pred_field_map / self.disp_map_dynamic_range_fac # Within [-1, 1]
complete_pred_field_map = complete_pred_field_map * self.disp_max_abs_value # Within [-DISP_MAX_ABS_VALUE, DISP_MAX_ABS_VALUE]
# # De-normalize segmentation image
# complete_segmentation_image = complete_segmentation_image * 255
# complete_segmentation_image = complete_segmentation_image.astype(dtype=np.uint8)
return complete_pred_field_map, complete_segmentation_image
def compute_patch_gradients(self, ori_image, polygon_map_array, checkpoints_dir):
"""
Runs inference on image_array and ori_gt_array with model checkpoint in checkpoints_dir
:param image_array:
:param ori_gt_array:
:param checkpoints_dir:
:return:
"""
spatial_shape = ori_image.shape[:2]
# Format inputs
image = ori_image[:, :, :3] # Remove alpha channel if any
image = (image / 255) * (self.image_dynamic_range[1] - self.image_dynamic_range[0]) + \
self.image_dynamic_range[0]
polygon_map_array = polygon_map_array / 255
# Init patch_gradient_list
patch_info_list = []
# Iterate over every patch and compute all gradients for this patch
patch_bbox_list = image_utils.compute_patch_boundingboxes(spatial_shape,
stride=self.input_res,
patch_res=self.input_res)
y_x = self.level_0_disp_pred[:, :, :, 0]
y_y = self.level_0_disp_pred[:, :, :, 1]
xs = tf.trainable_variables() # All trainable variables
grad_x_ops = tf.gradients(y_x, xs, name='gradients')
grad_y_ops = tf.gradients(y_y, xs, name='gradients')
grad_x_op = [grad_x_op for grad_x_op in grad_x_ops if grad_x_op is not None]
grad_y_op = [grad_y_op for grad_y_op in grad_y_ops if grad_y_op is not None]
# Saver
saver = tf.train.Saver(save_relative_paths=True)
with tf.Session() as sess:
# Restore checkpoint
restore_checkpoint_success = self.restore_checkpoint(sess, saver, checkpoints_dir)
if not restore_checkpoint_success:
sys.exit('No checkpoint found in {}'.format(checkpoints_dir))
# Loop over every patch
for index, bbox in enumerate(tqdm(patch_bbox_list, desc="Computing patch gradients")):
patch_image = image[bbox[0]:bbox[2],
bbox[1]:bbox[3], :]
patch_polygon_map = polygon_map_array[bbox[0]:bbox[2],
bbox[1]:bbox[3], :]
batch_image = np.expand_dims(patch_image, axis=0)
batch_polygon_map = np.expand_dims(patch_polygon_map, axis=0)
feed_dict = {
self.input_image: batch_image,
self.input_disp_polygon_map: batch_polygon_map,
self.keep_prob: 1.0
}
patch_grads_x, patch_grads_y = sess.run([grad_x_op, grad_y_op], feed_dict=feed_dict)
patch_ori_image = ori_image[bbox[0]:bbox[2],
bbox[1]:bbox[3], :]
patch_info = {
"bbox": bbox,
"image": patch_ori_image,
"grads": {
"x": patch_grads_x,
"y": patch_grads_y,
},
}
patch_info_list.append(patch_info)
return patch_info_list
def setup_compute_grads(self):
y_x = self.level_0_disp_pred[:, :, :, 0]
y_y = self.level_0_disp_pred[:, :, :, 1]
xs = tf.trainable_variables() # All trainable variables
grad_x_ops = tf.gradients(y_x, xs, name='gradients')
grad_y_ops = tf.gradients(y_y, xs, name='gradients')
self.grad_x_op = [grad_x_op for grad_x_op in grad_x_ops if grad_x_op is not None]
self.grad_y_op = [grad_y_op for grad_y_op in grad_y_ops if grad_y_op is not None]
def compute_grads(self, sess, image, polygon_map):
"""
        Runs inference on image and polygon_map and returns the gradients of the displacement prediction
:param image:
:param polygon_map:
:return:
"""
# Format inputs
image = image[:, :, :3] # Remove alpha channel if any
image = (image / 255) * (self.image_dynamic_range[1] - self.image_dynamic_range[0]) + \
self.image_dynamic_range[0]
polygon_map = polygon_map / 255
batch_image = np.expand_dims(image, axis=0)
batch_polygon_map = np.expand_dims(polygon_map, axis=0)
feed_dict = {
self.input_image: batch_image,
self.input_disp_polygon_map: batch_polygon_map,
self.keep_prob: 1.0
}
patch_level_0_disp_pred, patch_grads_x, patch_grads_y = sess.run([self.level_0_disp_pred, self.grad_x_op, self.grad_y_op], feed_dict=feed_dict)
grads = {
"x": patch_grads_x,
"y": patch_grads_y,
}
return grads, patch_level_0_disp_pred[0]
@staticmethod
def get_output_res(input_res, pool_count):
"""
This function has to be re-written if the model architecture changes
:param input_res:
:param pool_count:
:return:
"""
x, non_zero_remainder = model_utils.get_output_res(input_res, pool_count)
if non_zero_remainder:
print("WARNING: a pooling operation will result in a non integer res, the network will automatically add "
"padding there. The output of this function is not guaranteed to be exact.")
return x
@staticmethod
def get_input_res(output_res, pool_count):
"""
This function has to be re-written if the model architecture changes
:param output_res:
:param pool_count:
:return:
"""
x, non_zero_remainder = model_utils.get_input_res(output_res, pool_count)
if non_zero_remainder:
print("WARNING: a pooling operation will result in a non integer res, the network will automatically add "
"padding there. The output of this function is not guaranteed to be exact.")
return x
@staticmethod
def get_min_input_res(pool_count):
"""
Returns the minimum input resolution the network can handle.
        Because of the no-padding convolutions, the resolution of the output is smaller than the input,
        and thus there is a minimum input resolution below which the network cannot work.
This function has to be re-written if the model architecture changes
:param pool_count:
:return:
"""
x = model_utils.get_min_input_res(pool_count)
return x
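# Illustrative sketch (an assumption added for clarity, not the project's model_utils implementation):
# for a "valid" (no-padding) U-Net with two 3x3 convolutions per level, each convolution trims 2 pixels
# per border and each 2x2 pooling halves the resolution, which is why intermediate resolutions can
# become non-integer for some input sizes. The bottleneck convolutions are ignored in this sketch.
def _illustrative_output_res(input_res, pool_count, convs_per_level=2, kernel_size=3):
    res = float(input_res)
    for _ in range(pool_count):  # Contracting path: convolutions trim, pooling halves
        res = (res - convs_per_level * (kernel_size - 1)) / 2
    for _ in range(pool_count):  # Expanding path: upsampling doubles, convolutions trim
        res = res * 2 - convs_per_level * (kernel_size - 1)
    return res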
def main(_):
pool_count = 3
input_res = 124
output_res = MapAlignModel.get_output_res(input_res, pool_count)
print("With input res = {}, the network will output res = {}".format(input_res, output_res))
output_res = 4
input_res = MapAlignModel.get_input_res(output_res, pool_count)
print("For an output res = {}, the network will need an input res = {}".format(output_res, input_res))
min_input_res = MapAlignModel.get_min_input_res(pool_count)
print("Minimum input res the model can handle: {}".format(min_input_res))
if __name__ == '__main__':
tf.app.run(main=main)
| 51,104 | 49.349754 | 212 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/2_test_bradbury_buildings.2_align.py | import sys
import os
import numpy as np
import test
sys.path.append("../../../data/bradbury_buildings_roads_height_dataset")
import read as read_bradbury_buildings
sys.path.append("../../utils")
import run_utils
import python_utils
# --- Params --- #
TEST_CONFIG_NAME = "config.test.bradbury_buildings"
KEEP_PROB = 1  # Probability of keeping each input polygon (1 keeps all; values < 1 randomly drop some)
# Must be in descending order:
DS_FAC_LIST = [
8,
4,
# 2,
# 1,
]
RUNS_DIRPATH = "runs.igarss2019"
RUN_NAME_LIST = ["ds_fac_{}".format(ds_fac) for ds_fac in DS_FAC_LIST]
OUTPUT_DIRNAME_EXTENTION = "." + ".".join(RUN_NAME_LIST)
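# With the DS_FAC_LIST above, RUN_NAME_LIST is ["ds_fac_8", "ds_fac_4"] and OUTPUT_DIRNAME_EXTENTION
# evaluates to ".ds_fac_8.ds_fac_4": the output directory name records which model resolutions are chained.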
# --- --- #
def drop_items(items, keep_prob):
random_numbers = np.random.rand(len(items))
new_items = []
for item, random_number in zip(items, random_numbers):
if random_number < keep_prob:
new_items.append(item)
return new_items
def load_disp_maps(disp_maps_dir, image_info, disp_map_count):
disp_map_filename_format = "{}.disp_{:02d}.disp_map.npy"
disp_map_list = []
for i in range(disp_map_count):
image_name = read_bradbury_buildings.IMAGE_NAME_FORMAT.format(city=image_info["city"],
number=image_info["number"])
disp_map_filename = disp_map_filename_format.format(image_name, i)
disp_map_filepath = os.path.join(disp_maps_dir, disp_map_filename)
disp_map = np.load(disp_map_filepath)
disp_map_list.append(disp_map)
disp_maps = np.stack(disp_map_list, axis=0)
return disp_maps
def test_image(runs_dirpath, dataset_raw_dirpath, image_info, disp_maps_dir, disp_map_count, disp_max_abs_value,
batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value, thresholds, test_output_dir,
output_shapefiles):
# --- Load data --- #
ori_image, ori_metadata, ori_gt_polygons = read_bradbury_buildings.load_gt_data(dataset_raw_dirpath,
image_info["city"],
image_info["number"])
image_name = read_bradbury_buildings.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
# --- Randomly drop some polygons --- #
if KEEP_PROB < 1:
ori_gt_polygons = drop_items(ori_gt_polygons, KEEP_PROB)
# --- Load disp maps --- #
disp_maps = load_disp_maps(disp_maps_dir, image_info, disp_map_count)
test.test_image_with_gt_polygons_and_disp_maps(runs_dirpath, image_name, ori_image, ori_metadata, ori_gt_polygons,
disp_maps,
disp_max_abs_value, batch_size, ds_fac_list, run_name_list,
model_disp_max_abs_value, thresholds, test_output_dir,
output_shapefiles=output_shapefiles)
def main():
# load config file
config_test = run_utils.load_config(TEST_CONFIG_NAME)
# Find data_dir
data_dir = python_utils.choose_first_existing_path(config_test["data_dir_candidates"])
if data_dir is None:
print("ERROR: Data directory not found!")
exit()
else:
print("Using data from {}".format(data_dir))
dataset_raw_dirpath = os.path.join(data_dir, config_test["dataset_raw_partial_dirpath"])
output_dir = config_test["align_dir"] + OUTPUT_DIRNAME_EXTENTION
if not os.path.exists(output_dir):
os.makedirs(output_dir)
for images_info in config_test["images_info_list"]:
for number in images_info["numbers"]:
image_info = {
"city": images_info["city"],
"number": number,
}
test_image(RUNS_DIRPATH, dataset_raw_dirpath, image_info, config_test["disp_maps_dir"],
config_test["disp_map_params"]["disp_map_count"],
config_test["disp_map_params"]["disp_max_abs_value"], config_test["batch_size"], DS_FAC_LIST,
RUN_NAME_LIST, config_test["model_disp_max_abs_value"], config_test["thresholds"], output_dir,
config_test["output_shapefiles"])
if __name__ == '__main__':
main()
| 4,322 | 35.948718 | 119 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/2_test_bradbury_buildings.3_detect_new_buildings.py | import sys
import os
import numpy as np
import config
import test
import config_test_bradbury_buildings as config_test
sys.path.append("../../../data/bradbury_buildings_roads_height_dataset")
import read
# --- Params --- #
# Models
DS_FAC_LIST = [
# 8,
# 4,
# 2,
1,
] # Must be in descending order
RUN_NAME_LIST = [
# "ds_fac_8",
# "ds_fac_4",
# "ds_fac_2",
"ds_fac_1",
]
assert len(DS_FAC_LIST) == len(RUN_NAME_LIST), "DS_FAC_LIST and RUN_NAME_LIST should have the same length (and match)"
# Both lists should match and be in descending order of downsampling factor.
FILL_THRESHOLD = 0.5
OUTLINE_THRESHOLD = 0.05
SELEM_WIDTH = 3
ITERATIONS = 6
TEST_OUTPUT_DIR = config_test.OUTPUT_DIR + ".seg" + ".ds_fac_1"
# --- --- #
def test_detect_new_buildings(image_info, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value, thresholds, test_output_dir):
# --- Load data --- #
ori_image, ori_metadata, ori_gt_polygons = read.load_gt_data(config_test.DATASET_RAW_DIR, image_info["city"], image_info["number"])
image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
polygonization_params = {
"fill_threshold": FILL_THRESHOLD,
"outline_threshold": OUTLINE_THRESHOLD,
"selem_width": SELEM_WIDTH,
"iterations": ITERATIONS,
}
test.test_detect_new_buildings(image_name, ori_image, ori_metadata, ori_gt_polygons, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value, polygonization_params, thresholds, test_output_dir, output_shapefiles=config_test.OUTPUT_SHAPEFILES)
def main():
if not os.path.exists(TEST_OUTPUT_DIR):
os.makedirs(TEST_OUTPUT_DIR)
for image_info in config_test.IMAGES:
test_detect_new_buildings(image_info, config_test.BATCH_SIZE, DS_FAC_LIST, RUN_NAME_LIST, config_test.MODEL_DISP_MAX_ABS_VALUE, config_test.THRESHOLDS, TEST_OUTPUT_DIR)
if __name__ == '__main__':
main()
| 1,973 | 28.909091 | 255 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/2_test_aerial_image.2_align.py | import sys
import os
import numpy as np
import test
sys.path.insert(0, "../../../data/AerialImageDataset")
import read as read_inria
sys.path.append("../../utils")
import run_utils
import python_utils
# --- Params --- #
TEST_CONFIG_NAME = "config.test.aerial_image"
# Must be in descending order:
DS_FAC_LIST = [
8,
# 4,
# 2,
# 1,
]
RUNS_DIRPATH = "runs.igarss2019"
RUN_NAME_LIST = ["ds_fac_{}".format(ds_fac) for ds_fac in DS_FAC_LIST]
OUTPUT_DIRNAME_EXTENTION = "." + ".".join(RUN_NAME_LIST)
# --- --- #
def load_disp_maps(disp_maps_dir, image_info, disp_map_count):
disp_map_filename_format = "{}.disp_{:02d}.disp_map.npy"
disp_map_list = []
for i in range(disp_map_count):
image_name = read_inria.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
disp_map_filename = disp_map_filename_format.format(image_name, i)
disp_map_filepath = os.path.join(disp_maps_dir, disp_map_filename)
disp_map = np.load(disp_map_filepath)
disp_map_list.append(disp_map)
disp_maps = np.stack(disp_map_list, axis=0)
return disp_maps
def test_image(runs_dirpath, dataset_raw_dirpath, image_info, disp_maps_dir, disp_map_count, disp_max_abs_value,
batch_size, ds_fac_list, run_name_list,
model_disp_max_abs_value, thresholds, test_output_dir, output_shapefiles):
# --- Load data --- #
ori_image, ori_metadata, ori_gt_polygons = read_inria.load_gt_data(dataset_raw_dirpath, image_info["city"],
image_info["number"])
image_name = read_inria.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
# --- Load disp maps --- #
disp_maps = load_disp_maps(disp_maps_dir, image_info, disp_map_count)
test.test_image_with_gt_polygons_and_disp_maps(runs_dirpath, image_name, ori_image, ori_metadata, ori_gt_polygons,
disp_maps,
disp_max_abs_value, batch_size, ds_fac_list, run_name_list,
model_disp_max_abs_value, thresholds, test_output_dir,
output_shapefiles=output_shapefiles)
def main():
# load config file
config_test = run_utils.load_config(TEST_CONFIG_NAME)
# Find data_dir
data_dir = python_utils.choose_first_existing_path(config_test["data_dir_candidates"])
if data_dir is None:
print("ERROR: Data directory not found!")
exit()
else:
print("Using data from {}".format(data_dir))
dataset_raw_dirpath = os.path.join(data_dir, config_test["dataset_raw_partial_dirpath"])
output_dir = config_test["align_dir"] + OUTPUT_DIRNAME_EXTENTION
if not os.path.exists(output_dir):
os.makedirs(output_dir)
for images_info in config_test["images_info_list"]:
for number in images_info["numbers"]:
image_info = {
"city": images_info["city"],
"number": number,
}
test_image(RUNS_DIRPATH, dataset_raw_dirpath, image_info, config_test["disp_maps_dir"],
config_test["disp_map_params"]["disp_map_count"],
config_test["disp_map_params"]["disp_max_abs_value"], config_test["batch_size"], DS_FAC_LIST,
RUN_NAME_LIST, config_test["model_disp_max_abs_value"], config_test["thresholds"], output_dir,
config_test["output_shapefiles"])
if __name__ == '__main__':
main()
| 3,647 | 35.48 | 118 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/2_test_bradbury_buildings.1_generate_disps.py | import sys
import os
import numpy as np
import config_test_bradbury_buildings as config_test
import test
sys.path.append("../../utils")
import math_utils
import run_utils
import python_utils
sys.path.append("../../../data/bradbury_buildings_roads_height_dataset")
import read
# --- Params --- #
CONFIG_NAME = "config"
TEST_CONFIG_NAME = "config.test.bradbury_buildings"
# --- --- #
def generate_disp_maps(dataset_raw_dir, image_info, disp_map_params, thresholds, output_dir):
disp_map_filename_format = "{}.disp_{:02d}.disp_map.npy"
accuracies_filename_format = "{}.disp_{:02d}.accuracy.npy"
# --- Load data --- #
ori_image, ori_metadata, ori_gt_polygons = read.load_gt_data(dataset_raw_dir, image_info["city"], image_info["number"])
image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
spatial_shape = ori_image.shape[:2]
ori_normed_disp_field_maps = math_utils.create_displacement_field_maps(spatial_shape,
disp_map_params["disp_map_count"],
disp_map_params["disp_modes"],
disp_map_params["disp_gauss_mu_range"],
disp_map_params["disp_gauss_sig_scaling"])
disp_polygons_list = test.generate_disp_data(ori_normed_disp_field_maps, ori_gt_polygons, disp_map_params["disp_max_abs_value"])
# Save disp maps and accuracies individually
for i, (ori_normed_disp_field_map, disp_polygons) in enumerate(zip(ori_normed_disp_field_maps, disp_polygons_list)):
disp_map_filename = disp_map_filename_format.format(image_name, i)
disp_map_filepath = os.path.join(output_dir, disp_map_filename)
np.save(disp_map_filepath, ori_normed_disp_field_map)
accuracies_filename = accuracies_filename_format.format(image_name, i)
accuracies_filepath = os.path.join(output_dir, accuracies_filename)
integer_thresholds = [threshold for threshold in thresholds if (int(threshold) == threshold)]
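        # e.g. if thresholds is np.arange(0, 16.25, 0.25), integer_thresholds keeps only the whole-pixel
        # values [0.0, 1.0, ..., 16.0].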
accuracies = test.measure_accuracies(ori_gt_polygons, disp_polygons, integer_thresholds, accuracies_filepath)
def main():
# load config file
config = run_utils.load_config(CONFIG_NAME)
config_test = run_utils.load_config(TEST_CONFIG_NAME)
# Find data_dir
data_dir = python_utils.choose_first_existing_path(config["data_dir_candidates"])
if data_dir is None:
print("ERROR: Data directory not found!")
exit()
else:
print("Using data from {}".format(data_dir))
dataset_raw_dirpath = os.path.join(data_dir, config_test["dataset_raw_partial_dirpath"])
if not os.path.exists(config_test["disp_maps_dir"]):
os.makedirs(config_test["disp_maps_dir"])
for images_info in config_test["images_info_list"]:
for number in images_info["numbers"]:
image_info = {
"city": images_info["city"],
"number": number,
}
generate_disp_maps(dataset_raw_dirpath, image_info, config_test["disp_map_params"],
config_test["thresholds"], config_test["disp_maps_dir"])
if __name__ == '__main__':
main()
| 3,380 | 40.740741 | 132 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/config.py | import os
import sys
sys.path.append("../../utils")
import python_utils
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
# Dataset online processing
DATA_DIR = python_utils.choose_first_existing_path([
"/local/shared/epitome-polygon-deep-learning/data", # Try local node first
"/home/nigirard/epitome-polygon-deep-learning/data",
"/workspace/data", # Try inside Docker image
])
if DATA_DIR is None:
print("ERROR: Data directory not found!")
exit()
else:
print("Using data from {}".format(DATA_DIR))
REFERENCE_PIXEL_SIZE = 0.3 # In meters.
DS_FAC_LIST = [1, 2, 4, 8]
DS_REPEAT_LIST = [1, 4, 16, 64]  # Repeats balance samples within batches; otherwise there would be too few samples with downsampling_factor=8
IMAGE_DYNAMIC_RANGE = [-1, 1]
DISP_MAP_DYNAMIC_RANGE_FAC = 0.5 # Sets disp_map values in [-0.5, 0.5]
DISP_MAX_ABS_VALUE = 4
TFRECORDS_PARTIAL_DIRPATH_LIST = [
os.path.join(DATA_DIR, "AerialImageDataset/tfrecords.mapalign.multires"),
os.path.join(DATA_DIR, "bradbury_buildings_roads_height_dataset/tfrecords.mapalign.multires"),
os.path.join(DATA_DIR, "mapping_challenge_dataset/tfrecords.mapalign.multires"),
]
TFRECORD_FILENAME_FORMAT = "{}.ds_fac_{:02d}.{{:06d}}.tfrecord" # Dataset fold, downsampling factor, shard index
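# e.g. for a fold hypothetically named "train": TFRECORD_FILENAME_FORMAT.format("train", 8) gives
# "train.ds_fac_08.{:06d}.tfrecord", and the remaining field is later filled with the shard index
# (e.g. "train.ds_fac_08.000000.tfrecord").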
KEEP_POLY_PROB = 0.1  # Default: 0.1. Each input misaligned polygon has a probability KEEP_POLY_PROB of being kept and (1 - KEEP_POLY_PROB) of being removed
DATA_AUG = True
# --- Model(s) --- #
INPUT_RES = 220
# Input image
ADD_IMAGE_INPUT = True
IMAGE_CHANNEL_COUNT = 3
IMAGE_FEATURE_BASE_COUNT = 16 * 2 # Default: 16 * 2
# Input poly map
ADD_POLY_MAP_INPUT = True
POLY_MAP_CHANNEL_COUNT = 3 # (0: area, 1: edge, 2: vertex)
POLY_MAP_FEATURE_BASE_COUNT = 8 * 2 # Default: 8 * 2
COMMON_FEATURE_BASE_COUNT = 24 * 2 # Default: 24 * 2
POOL_COUNT = 3 # Number of 2x2 pooling operations (Min: 1). Results in (MODEL_POOL_COUNT + 1) resolution levels.
ADD_DISP_OUTPUT = True
DISP_CHANNEL_COUNT = 2 # Displacement map channel count (0: i, 1: j)
ADD_SEG_OUTPUT = True
SEG_CHANNEL_COUNT = 4 # Segmentation channel count (0: background, 1: area, 2: edge, 3: vertex)
# --- --- #
# Losses
# Implicitly we have DISP_POLYGON_BACKGROUND_COEF = 0.0
DISP_POLYGON_FILL_COEF = 0.1
DISP_POLYGON_OUTLINE_COEF = 1
DISP_POLYGON_VERTEX_COEF = 10
SEG_BACKGROUND_COEF = 0.05
SEG_POLYGON_FILL_COEF = 0.1
SEG_POLYGON_OUTLINE_COEF = 1
SEG_POLYGON_VERTEX_COEF = 10
DISP_LOSS_COEF = 100
SEG_LOSS_COEF = 50
LAPLACIAN_PENALTY_COEF = 0 # Default: 10000 # TODO: experiment again with non-zero values (Now that the Laplacian penalty bug is fixed)
# Each level's prediction has a different loss coefficient that can also be changed over time
# Note: len(LEVEL_LOSS_COEFS_PARAMS) must be equal to MODEL_POOL_COUNT
# Note: There are (MODEL_POOL_COUNT + 1) resolution levels in total but the last level does not have prediction outputs
# to compute a level loss on (it is the bottom of the "U" of the U-Net)
# Note: Values must be floats
LEVEL_LOSS_COEFS_PARAMS = [
# Level 0, same resolution as input image
{
"boundaries": [2500, 5000, 7500],
"values": [0.50, 0.75, 0.9, 1.0]
},
{
"boundaries": [2500, 5000, 7500],
"values": [0.35, 0.20, 0.1, 0.0]
},
{
"boundaries": [2500, 5000, 7500],
"values": [0.15, 0.05, 0.0, 0.0]
},
]
# LEVEL_LOSS_COEFS_PARAMS = [
# # Level 0, same resolution as input image
# {
# "boundaries": [2500, 5000, 7500],
# "values": [1.0, 1.0, 1.0, 1.0]
# },
# {
# "boundaries": [2500, 5000, 7500],
# "values": [0.0, 0.0, 0.0, 0.0]
# },
# {
# "boundaries": [2500, 5000, 7500],
# "values": [0.0, 0.0, 0.0, 0.0]
# },
# ]
# LEVEL_LOSS_COEFS_PARAMS = [
# # Level 0, same resolution as input image
# {
# "boundaries": [2500, 5000, 7500],
# "values": [1.0, 1.0, 1.0, 1.0]
# },
# {
# "boundaries": [2500, 5000, 7500],
# "values": [0.0, 0.0, 0.0, 0.0]
# },
# ]
assert len(LEVEL_LOSS_COEFS_PARAMS) == POOL_COUNT, \
    "LEVEL_LOSS_COEFS_PARAMS ({} elements) must have POOL_COUNT ({}) elements".format(
        len(LEVEL_LOSS_COEFS_PARAMS), POOL_COUNT)
# Training
PLOT_RESULTS = False # Is extremely slow when True inside Docker...
BASE_LEARNING_RATE = 1e-4
LEARNING_RATE_PARAMS = {
"boundaries": [25000],
"values": [BASE_LEARNING_RATE, 0.5 * BASE_LEARNING_RATE]
}
WEIGHT_DECAY = 1e-4 # Default: 1e-6
DROPOUT_KEEP_PROB = 1.0
MAX_ITER = 100000
TRAIN_SUMMARY_STEP = 250
VAL_SUMMARY_STEP = 1000
CHECKPOINT_STEP = 1000
# Outputs
MODEL_NAME = "mapalign_mutlires"
RUNS_DIR = os.path.join(PROJECT_DIR, "runs")
LOGS_DIRNAME = "logs"
CHECKPOINTS_DIRNAME = "checkpoints"
| 4,751 | 31.326531 | 137 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/config_test_bradbury_buildings.py | import os
import numpy as np
import config
DATASET_RAW_DIR = os.path.join(config.DATA_DIR, "bradbury_buildings_roads_height_dataset/raw")
# IMAGES = [
# {
# "city": "SanFrancisco",
# "number": 1,
# },
# {
# "city": "SanFrancisco",
# "number": 2,
# },
# {
# "city": "SanFrancisco",
# "number": 3,
# },
# ]
IMAGES = [
{
"city": "Norfolk",
"number": 1,
},
{
"city": "Norfolk",
"number": 2,
},
# Too few buildings for accuracy measurement:
# {
# "city": "Norfolk",
# "number": 3,
# },
]
# Displacement map
DISP_MAP_PARAMS = {
"disp_map_count": 10,
"disp_modes": 30, # Number of Gaussians mixed up to make the displacement map (Default: 20)
"disp_gauss_mu_range": [0, 1], # Coordinates are normalized to [0, 1] before the function is applied
"disp_gauss_sig_scaling": [0.0, 0.002], # Coordinates are normalized to [0, 1] before the function is applied
"disp_max_abs_value": 32,
}
# Model
BATCH_SIZE = 12
MODEL_DISP_MAX_ABS_VALUE = 4
THRESHOLDS = np.arange(0, 16.25, 0.25)
OUTPUT_SHAPEFILES = False # Bradbury images are not geo-localized
OUTPUT_DIR = "test.accv2018/bradbury_buildings"
DISP_MAPS_DIR = OUTPUT_DIR + ".disp_maps"
| 1,304 | 21.5 | 114 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/6_test_building_height_estimation.py | import os.path
import numpy as np
import matplotlib.pyplot as plt
# --- Params --- #
BINS = 50
INPUT_BASE_DIRPATH = "3d_buildings/leibnitz"
# --- --- #
def compute_polygon_area(polygon):
return 0.5 * np.abs(
np.dot(polygon[:, 0], np.roll(polygon[:, 1], 1)) - np.dot(polygon[:, 1], np.roll(polygon[:, 0], 1)))
def main(input_base_dirpath, bins):
# --- Loading data --- #
polygon_array = np.load(os.path.join(input_base_dirpath, "polygons.npy"))
gt_heights_array = np.load(os.path.join(input_base_dirpath, "gt_heights.npy"))
pred_heights_array = np.load(os.path.join(input_base_dirpath, "pred_heights.npy"))
# # Exclude buildings with pred_height < 3:
# keep_indices = np.where(3 <= pred_heights_array)
# polygon_array = polygon_array[keep_indices]
# gt_heights_array = gt_heights_array[keep_indices]
# pred_heights_array = pred_heights_array[keep_indices]
mean_gt_height = np.mean(gt_heights_array)
print("mean_gt_height:")
print(mean_gt_height)
mean_pred_height = np.mean(pred_heights_array)
print("mean_pred_height:")
print(mean_pred_height)
diff_array = np.abs(gt_heights_array - pred_heights_array)
mean_diff = np.mean(diff_array)
print("mean_diff:")
print(mean_diff)
# --- Plot area/height pairs --- #
polygon_area_list = [compute_polygon_area(polygon) for polygon in polygon_array]
plt.scatter(polygon_area_list, diff_array, s=1)
# plt.scatter(polygon_area_list, pred_heights_array, s=1)
plt.xlabel('Area')
plt.xlim([0, 1000])
plt.ylabel('Height difference')
plt.title('Height difference relative to area')
plt.grid(True)
plt.show()
# --- Plot histograms --- #
# pred_heights_array_int = np.round(pred_heights_array).astype(int)
plt.hist(gt_heights_array, bins, alpha=0.5)
plt.hist(pred_heights_array, bins, alpha=0.5)
plt.xlabel('Height')
plt.ylabel('Count')
plt.title('Histogram of building heights')
plt.grid(True)
plt.show()
# --- Measure results --- #
if __name__ == "__main__":
main(INPUT_BASE_DIRPATH, BINS)
| 2,115 | 27.213333 | 108 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/multires_pipeline.py | import sys
import skimage.transform
import skimage.io
import numpy as np
import model
sys.path.append("../../utils")
import run_utils
import polygon_utils
import print_utils
def rescale_data(image, polygons, scale):
downsampled_image = skimage.transform.rescale(image, scale, order=3, preserve_range=True, multichannel=True, anti_aliasing=True)
downsampled_image = downsampled_image.astype(image.dtype)
downsampled_polygons = polygon_utils.rescale_polygon(polygons, scale)
return downsampled_image, downsampled_polygons
def downsample_data(image, metadata, polygons, factor, reference_pixel_size):
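    # The downsampling factor is defined w.r.t. the reference pixel size: e.g. an image with 0.15 m
    # pixels, factor 2 and reference_pixel_size 0.3 m gives corrected_factor = 2 * 0.3 / 0.15 = 4,
    # so every input is brought to the same physical resolution (factor * reference_pixel_size per pixel).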
corrected_factor = factor * reference_pixel_size / metadata["pixelsize"]
scale = 1 / corrected_factor
downsampled_image, downsampled_polygons = rescale_data(image, polygons, scale)
return downsampled_image, downsampled_polygons
def upsample_data(image, metadata, polygons, factor, reference_pixel_size):
# TODO: test with metadata["pixelsize"] != config.REFERENCE_PIXEL_SIZE
corrected_factor = factor * reference_pixel_size / metadata["pixelsize"]
upsampled_image, upsampled_polygons = rescale_data(image, polygons, corrected_factor)
return upsampled_image, upsampled_polygons
def inference(runs_dirpath, ori_image, ori_metadata, ori_disp_polygons, model_disp_max_abs_value, batch_size, scale_factor, run_name):
# Setup run dir and load config file
run_dir = run_utils.setup_run_dir(runs_dirpath, run_name)
_, checkpoints_dir = run_utils.setup_run_subdirs(run_dir)
config = run_utils.load_config(config_dirpath=run_dir)
# Downsample
image, disp_polygons = downsample_data(ori_image, ori_metadata, ori_disp_polygons, scale_factor, config["reference_pixel_size"])
spatial_shape = image.shape[:2]
# Draw displaced polygon map
# disp_polygons_to_rasterize = []
disp_polygons_to_rasterize = disp_polygons
disp_polygon_map = polygon_utils.draw_polygon_map(disp_polygons_to_rasterize, spatial_shape, fill=True, edges=True,
vertices=True)
# Compute output_res
output_res = model.MapAlignModel.get_output_res(config["input_res"], config["pool_count"])
# print("output_res: {}".format(output_res))
map_align_model = model.MapAlignModel(config["model_name"], config["input_res"],
config["add_image_input"], config["image_channel_count"],
config["image_feature_base_count"],
config["add_poly_map_input"], config["poly_map_channel_count"],
config["poly_map_feature_base_count"],
config["common_feature_base_count"], config["pool_count"],
config["add_disp_output"], config["disp_channel_count"],
config["add_seg_output"], config["seg_channel_count"],
output_res,
batch_size,
config["loss_params"],
config["level_loss_coefs_params"],
config["learning_rate_params"],
config["weight_decay"],
config["image_dynamic_range"], config["disp_map_dynamic_range_fac"],
model_disp_max_abs_value)
pred_field_map, segmentation_image = map_align_model.inference(image, disp_polygon_map, checkpoints_dir)
# --- align disp_polygon according to pred_field_map --- #
# print("# --- Align disp_polygon according to pred_field_map --- #")
aligned_disp_polygons = disp_polygons
# First remove polygons that are not fully inside the inner_image
padding = (spatial_shape[0] - pred_field_map.shape[0]) // 2
bounding_box = [padding, padding, spatial_shape[0] - padding, spatial_shape[1] - padding]
    # aligned_disp_polygons = polygon_utils.filter_polygons_in_bounding_box(aligned_disp_polygons, bounding_box) # TODO: reimplement? But also filter out ori_gt_polygons for comparison
aligned_disp_polygons = polygon_utils.transform_polygons_to_bounding_box_space(aligned_disp_polygons, bounding_box)
# Then apply displacement field map to aligned_disp_polygons
aligned_disp_polygons = polygon_utils.apply_disp_map_to_polygons(pred_field_map, aligned_disp_polygons)
# Restore polygons to original image space
bounding_box = [-padding, -padding, spatial_shape[0] + padding, spatial_shape[1] + padding]
aligned_disp_polygons = polygon_utils.transform_polygons_to_bounding_box_space(aligned_disp_polygons, bounding_box)
# Add padding to segmentation_image
final_segmentation_image = np.zeros((spatial_shape[0], spatial_shape[1], segmentation_image.shape[2]))
final_segmentation_image[padding:-padding, padding:-padding, :] = segmentation_image
# --- Upsample outputs --- #
# print("# --- Upsample outputs --- #")
final_segmentation_image, aligned_disp_polygons = upsample_data(final_segmentation_image, ori_metadata, aligned_disp_polygons, scale_factor, config["reference_pixel_size"])
return aligned_disp_polygons, final_segmentation_image
def multires_inference(runs_dirpath, ori_image, ori_metadata, ori_disp_polygons, model_disp_max_abs_value, batch_size, ds_fac_list, run_name_list):
"""
Returns the last segmentation image that was computed (from the finest resolution)
:param ori_image:
:param ori_metadata:
:param ori_disp_polygons:
:param model_disp_max_abs_value:
:param ds_fac_list:
:param run_name_list:
:return:
"""
aligned_disp_polygons = ori_disp_polygons # init
segmentation_image = None
# Launch the resolution chain pipeline:
for index, (ds_fac, run_name) in enumerate(zip(ds_fac_list, run_name_list)):
print("# --- downsampling_factor: {} --- #".format(ds_fac))
try:
aligned_disp_polygons, segmentation_image = inference(runs_dirpath, ori_image, ori_metadata, aligned_disp_polygons, model_disp_max_abs_value, batch_size, ds_fac, run_name)
except ValueError as e:
print_utils.print_warning(str(e))
return aligned_disp_polygons, segmentation_image
| 6,435 | 47.390977 | 186 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/2_test_aerial_image.1_generate_disps.py | import sys
import os
import numpy as np
from jsmin import jsmin
import json
import test
sys.path.append("../../../data/AerialImageDataset")
import read
sys.path.append("../../utils")
import math_utils
import run_utils
import python_utils
# --- Params --- #
CONFIG_NAME = "config"
TEST_CONFIG_NAME = "config.test.aerial_image"
# --- --- #
def generate_disp_maps(dataset_raw_dirpath, image_info, disp_map_params, thresholds, output_dir):
disp_map_filename_format = "{}.disp_{:02d}.disp_map.npy"
accuracies_filename_format = "{}.disp_{:02d}.accuracy.npy"
# --- Load data --- #
ori_image, ori_metadata, ori_gt_polygons = read.load_gt_data(dataset_raw_dirpath, image_info["city"], image_info["number"])
image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
print("image_name: {}".format(image_name))
spatial_shape = ori_image.shape[:2]
ori_normed_disp_field_maps = math_utils.create_displacement_field_maps(spatial_shape,
disp_map_params["disp_map_count"],
disp_map_params["disp_modes"],
disp_map_params["disp_gauss_mu_range"],
disp_map_params["disp_gauss_sig_scaling"])
disp_polygons_list = test.generate_disp_data(ori_normed_disp_field_maps, ori_gt_polygons, disp_map_params["disp_max_abs_value"])
# Save disp maps and accuracies individually
for i, (ori_normed_disp_field_map, disp_polygons) in enumerate(zip(ori_normed_disp_field_maps, disp_polygons_list)):
disp_map_filename = disp_map_filename_format.format(image_name, i)
disp_map_filepath = os.path.join(output_dir, disp_map_filename)
np.save(disp_map_filepath, ori_normed_disp_field_map)
accuracies_filename = accuracies_filename_format.format(image_name, i)
accuracies_filepath = os.path.join(output_dir, accuracies_filename)
integer_thresholds = [threshold for threshold in thresholds if (int(threshold) == threshold)]
accuracies = test.measure_accuracies(ori_gt_polygons, disp_polygons, integer_thresholds, accuracies_filepath)
def main():
# load config file
config = run_utils.load_config(CONFIG_NAME)
config_test = run_utils.load_config(TEST_CONFIG_NAME)
# Find data_dir
data_dir = python_utils.choose_first_existing_path(config["data_dir_candidates"])
if data_dir is None:
print("ERROR: Data directory not found!")
exit()
else:
print("Using data from {}".format(data_dir))
dataset_raw_dirpath = os.path.join(data_dir, config_test["dataset_raw_partial_dirpath"])
if not os.path.exists(config_test["disp_maps_dir"]):
os.makedirs(config_test["disp_maps_dir"])
for images_info in config_test["images_info_list"]:
for number in images_info["numbers"]:
image_info = {
"city": images_info["city"],
"number": number,
}
generate_disp_maps(dataset_raw_dirpath, image_info, config_test["disp_map_params"], config_test["thresholds"], config_test["disp_maps_dir"])
if __name__ == '__main__':
main()
| 3,360 | 39.987805 | 152 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/2_test_aerial_image.align_osm_gt.py | import sys
import os
import test
# CHANGE to your own test config file:
import config_test_inria as config_test
# CHANGE to the path of your own read.py script:
sys.path.append("../../../data/AerialImageDataset")
import read
# --- Params --- #
# Iteratively use these downsampling factors (should be in descending order):
DS_FAC_LIST = [
8,
4,
2,
1,
]
# Name of the runs to use (in the same order as the DS_FAC_LIST list):
RUN_NAME_LIST = [
"ds_fac_8",
"ds_fac_4",
"ds_fac_2",
"ds_fac_1",
]
assert len(DS_FAC_LIST) == len(RUN_NAME_LIST), "DS_FAC_LIST and RUN_NAME_LIST should have the same length (and match)"
OUTPUT_DIR = config_test.OUTPUT_DIR + ".align" + ".ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1"
# --- --- #
def test_image(image_info, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value, thresholds, test_output_dir):
# --- Load data --- #
# CHANGE the arguments of the load_gt_data() function if using your own and it does not take the same arguments:
ori_image, ori_metadata, ori_disp_polygons = read.load_gt_data(config_test.DATASET_RAW_DIR, image_info["city"], image_info["number"])
# CHANGE the arguments of the IMAGE_NAME_FORMAT format string if using your own and it does not take the same arguments:
image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
ori_gt_polygons = []
test.test(ori_image, ori_metadata, ori_gt_polygons, ori_disp_polygons, batch_size, ds_fac_list, run_name_list,
model_disp_max_abs_value, thresholds, test_output_dir, image_name, output_shapefiles=config_test.OUTPUT_SHAPEFILES)
def main():
if not os.path.exists(OUTPUT_DIR):
os.makedirs(OUTPUT_DIR)
for image_info in config_test.IMAGES:
test_image(image_info, config_test.BATCH_SIZE, DS_FAC_LIST,
RUN_NAME_LIST, config_test.MODEL_DISP_MAX_ABS_VALUE, config_test.THRESHOLDS, OUTPUT_DIR)
if __name__ == '__main__':
main()
| 1,996 | 32.283333 | 137 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/4_compute_building_heights.py | import os.path
import sys
import math
import itertools
import numpy as np
import config
sys.path.append("../../utils")
import geo_utils
# --- Params --- #
DATASET_DIR = os.path.join(config.PROJECT_DIR, "../../../data/stereo_dataset")
RAW_DIR = os.path.join(DATASET_DIR, "raw/leibnitz")
INPUT_DIR = "test/stereo_dataset_real_displacements.align.ds_fac_8.ds_fac_4.ds_fac_2"
VIEW_INFO_LIST = [
{
"image_name": "leibnitz_ortho_ref",
"image_filepath": os.path.join(RAW_DIR, "leibnitz_ortho_ref_RGB.tif"),
"shapefile_filepath": os.path.join(INPUT_DIR, "leibnitz_ref_rec.aligned_polygons.shp"),
# "shapefile_filepath": os.path.join(INPUT_DIR, "leibnitz_rec_ref.ori_polygons.shp"), # GT polygons
"angle": 76.66734850675575 * math.pi / 180, # Elevation
},
{
"image_name": "leibnitz_ortho_rec",
"image_filepath": os.path.join(RAW_DIR, "leibnitz_ortho_rec_RGB.tif"),
"shapefile_filepath": os.path.join(INPUT_DIR, "leibnitz_rec_ref.aligned_polygons.shp"),
# "shapefile_filepath": os.path.join(INPUT_DIR, "leibnitz_ref_rec.ori_polygons.shp"), # GT polygons
"angle": 69.62096370829768 * math.pi / 180, # Elevation
},
]
PIXELSIZE = 0.5 # 1 pixel is 0.5 meters
OUTPUT_BASE_DIRPATH = "3d_buildings/leibnitz"
# --- --- #
def compute_heights(view_1, view_2, pixelsize):
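    """Estimates building heights from the horizontal offset between the two ortho views: a roof point
    at height h is presumably offset from its ground footprint by h / tan(elevation) in each view, so the
    offset between views is d = h * (tan_beta - tan_alpha) / (tan_alpha * tan_beta) and
    h = d * tan_alpha * tan_beta / (tan_beta - tan_alpha), with d measured in pixels (hence * pixelsize)."""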
tan_1 = math.tan(view_1["angle"])
tan_2 = math.tan(view_2["angle"])
tan_alpha = min(tan_1, tan_2)
tan_beta = max(tan_1, tan_2)
angle_height_coef = tan_alpha * tan_beta / (tan_beta - tan_alpha)
heights = []
for polygon_1, polygon_2 in zip(view_1["polygon_list"], view_2["polygon_list"]):
center_1 = np.mean(polygon_1, axis=0, keepdims=True)
center_2 = np.mean(polygon_2, axis=0, keepdims=True)
distance = np.sqrt(np.sum(np.square(center_1 - center_2), axis=1))[0]
height = distance * angle_height_coef * pixelsize
heights.append(height)
return heights
def main(view_info_list, pixelsize, output_base_dirpath):
# --- Loading shapefiles --- #
print("# --- Loading shapefiles --- #")
view_list = []
for view_info in view_info_list:
polygon_list, properties_list = geo_utils.get_polygons_from_shapefile(view_info["image_filepath"],
view_info["shapefile_filepath"])
view = {
"polygon_list": polygon_list,
"properties_list": properties_list,
"angle": view_info["angle"],
}
view_list.append(view)
# Extract ground truth building heights
gt_heights = []
for properties in view_list[0]["properties_list"]:
gt_heights.append(properties["HEIGHT"])
gt_heights_array = np.array(gt_heights)
# Iterate over all possible pairs of views:
heights_list = []
view_pair_list = itertools.combinations(view_list, 2)
for view_pair in view_pair_list:
heights = compute_heights(view_pair[0], view_pair[1], pixelsize)
heights_list.append(heights)
# Average results from pairs
heights_list_array = np.array(heights_list)
pred_heights_array = np.mean(heights_list_array, axis=0)
# Correct pred heights:
    pred_heights_array = pred_heights_array / 4.39  # Correction factor found by computing heights with the ground-truth polygons
# --- Save results --- #
polygon_list = view_list[0]["polygon_list"] # Take from the first view
# Save shapefile
output_shapefile_filepath = os.path.join(output_base_dirpath, view_info_list[0]["image_name"] + "_pred_heights.shp")
pred_properties_list = view_list[0]["properties_list"].copy() # First copy existing properties list
for i, pred_height in enumerate(pred_heights_array): # Then replace HEIGHT
pred_properties_list[i]["HEIGHT"] = pred_height
geo_utils.save_shapefile_from_polygons(view_list[0]["polygon_list"], view_info_list[0]["image_filepath"], output_shapefile_filepath, properties_list=pred_properties_list)
# Save for modeling buildings in Blender and measuring accuracy
scaled_polygon_list = [polygon * pixelsize for polygon in polygon_list]
np.save(os.path.join(output_base_dirpath, "polygons.npy"), scaled_polygon_list)
np.save(os.path.join(output_base_dirpath, "gt_heights.npy"), gt_heights_array)
np.save(os.path.join(output_base_dirpath, "pred_heights.npy"), pred_heights_array)
if __name__ == "__main__":
main(VIEW_INFO_LIST, PIXELSIZE, OUTPUT_BASE_DIRPATH)
| 4,526 | 40.154545 | 174 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/download_pretrained.py | import os.path
import urllib.request
import zipfile
# --- Params --- #
ressource_filename_list = ["runs.igarss2019.zip"]
ressource_dirpath_url = "https://www-sop.inria.fr/members/Nicolas.Girard/downloads/mapalignment"
script_filepath = os.path.realpath(__file__)
zip_download_dirpath = os.path.join(os.path.dirname(script_filepath), "runs.zip")
download_dirpath = os.path.join(os.path.dirname(script_filepath), "runs")
# --- --- #
for ressource_filename in ressource_filename_list:
ressource_url = os.path.join(ressource_dirpath_url, ressource_filename)
print("Downloading zip from {}, please wait... (approx. 406MB to download)".format(ressource_url))
urllib.request.urlretrieve(ressource_url, zip_download_dirpath)
print("Extracting zip...")
zip_ref = zipfile.ZipFile(zip_download_dirpath, 'r')
os.makedirs(download_dirpath)
zip_ref.extractall(download_dirpath)
zip_ref.close()
os.remove(zip_download_dirpath)
| 953 | 35.692308 | 102 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/1_train.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import tensorflow as tf
import os
import model
sys.path.append(os.path.join("../dataset_utils"))
import dataset_multires
sys.path.append("../../utils")
import python_utils
import run_utils
# --- Command-line FLAGS --- #
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_string('config', "config",
"Name of the config file, excluding the .json file extension")
flags.DEFINE_boolean('new_run', False,
"Train from scratch (when True) or train from the last checkpoint (when False)")
flags.DEFINE_string('init_run_name', None,
"This is the run_name to initialize the weights from. "
"If None, weights will be initialized randomly."
"This is a single word, without the timestamp.")
flags.DEFINE_string('run_name', None,
"Continue training from run_name. This is a single word, without the timestamp.")
# If not specified, the last run is used (unless new_run is True or no runs are in the runs directory).
# If new_run is True, creates the new run with name equal run_name.
flags.DEFINE_integer('batch_size', 8, "Batch size. Generally set as large as the VRAM can handle.")
flags.DEFINE_integer('ds_fac', 8, "Downsampling factor. Choose from which resolution sub-dataset to train on.")
# Some examples:
# On Quadro M2200, 4GB VRAM: python 1_train.py --new_run --run_name=ds_fac_8 --batch_size 4 --ds_fac 8
# On Quadro M2200, 4GB VRAM: python 1_train.py --new_run --init_run_name=ds_fac_8 --run_name=ds_fac_4_with_init --batch_size 4 --ds_fac_4
# On Quadro M2200, 4GB VRAM: python 1_train.py --new_run --batch_size 4 --ds_fac 2
# On GTX 1080 Ti, 11GB VRAM: python 1_train.py --new_run --run_name=ds_fac_8_no_seg --batch_size 32 --ds_fac 8
# On GTX 1080 Ti, 11GB VRAM: python 1_train.py --new_run --run_name=ds_fac_4_no_seg --batch_size 32 --ds_fac 4
# On GTX 1080 Ti, 11GB VRAM: python 1_train.py --new_run --init_run_name=ds_fac_4_double --run_name=ds_fac_8_double --batch_size 32 --ds_fac 8
# On GTX 1080 Ti, 11GB VRAM: python 1_train.py --new_run --init_run_name=ds_fac_4_double --run_name=ds_fac_2_double --batch_size 32 --ds_fac 2
# On GTX 1080 Ti, 11GB VRAM: python 1_train.py --new_run --init_run_name=ds_fac_1_double --run_name=ds_fac_1_double_seg --batch_size 32 --ds_fac 1
# On GTX 1080 Ti, 11GB VRAM: python 1_train.py --run_name=ds_fac_8_double --batch_size 32 --ds_fac 8
# On GTX 1080 Ti, 11GB VRAM: python 1_train.py --run_name=ds_fac_2_double --batch_size 32 --ds_fac 2
# --- --- #
def train(config, tfrecords_dirpath_list, init_run_dirpath, run_dirpath, batch_size, ds_fac_list, ds_repeat_list):
# setup init checkpoints directory path if one is specified:
if init_run_dirpath is not None:
_, init_checkpoints_dirpath = run_utils.setup_run_subdirs(init_run_dirpath, config["logs_dirname"],
config["checkpoints_dirname"])
else:
init_checkpoints_dirpath = None
# setup stage run dirs
# create run subdirectories if they do not exist
logs_dirpath, checkpoints_dirpath = run_utils.setup_run_subdirs(run_dirpath, config["logs_dirname"],
config["checkpoints_dirname"])
# compute output_res
output_res = model.MapAlignModel.get_output_res(config["input_res"], config["pool_count"])
print("output_res: {}".format(output_res))
# instantiate model object (resets the default graph)
map_align_model = model.MapAlignModel(config["model_name"], config["input_res"],
config["add_image_input"], config["image_channel_count"],
config["image_feature_base_count"],
config["add_poly_map_input"], config["poly_map_channel_count"],
config["poly_map_feature_base_count"],
config["common_feature_base_count"], config["pool_count"],
config["add_disp_output"], config["disp_channel_count"],
config["add_seg_output"], config["seg_channel_count"],
output_res,
batch_size,
config["loss_params"],
config["level_loss_coefs_params"],
config["learning_rate_params"],
config["weight_decay"],
config["image_dynamic_range"], config["disp_map_dynamic_range_fac"],
config["disp_max_abs_value"])
# train dataset
train_dataset_filename_list = dataset_multires.create_dataset_filename_list(tfrecords_dirpath_list,
config["tfrecord_filename_format"],
ds_fac_list,
dataset="train",
resolution_file_repeats=ds_repeat_list)
train_dataset_tensors = dataset_multires.read_and_decode(
train_dataset_filename_list,
output_res,
config["input_res"],
batch_size,
config["image_dynamic_range"],
disp_map_dynamic_range_fac=config["disp_map_dynamic_range_fac"],
keep_poly_prob=config["keep_poly_prob"],
data_aug=config["data_aug"],
train=True)
if config["perform_validation_step"]:
# val dataset
val_dataset_filename_list = dataset_multires.create_dataset_filename_list(tfrecords_dirpath_list,
config["tfrecord_filename_format"],
ds_fac_list,
dataset="val",
resolution_file_repeats=ds_repeat_list)
val_dataset_tensors = dataset_multires.read_and_decode(
val_dataset_filename_list,
output_res,
config["input_res"],
batch_size,
config["image_dynamic_range"],
disp_map_dynamic_range_fac=config["disp_map_dynamic_range_fac"],
keep_poly_prob=config["keep_poly_prob"],
data_aug=False,
train=False)
else:
val_dataset_tensors = None
# launch training
map_align_model.optimize(train_dataset_tensors, val_dataset_tensors,
config["max_iter"], config["dropout_keep_prob"],
logs_dirpath, config["train_summary_step"], config["val_summary_step"],
checkpoints_dirpath, config["checkpoint_step"],
init_checkpoints_dirpath=init_checkpoints_dirpath,
plot_results=config["plot_results"])
def main(_):
working_dir = os.path.dirname(os.path.abspath(__file__))
# print FLAGS
print("#--- FLAGS: ---#")
print("config: {}".format(FLAGS.config))
print("new_run: {}".format(FLAGS.new_run))
print("init_run_name: {}".format(FLAGS.init_run_name))
print("run_name: {}".format(FLAGS.run_name))
print("batch_size: {}".format(FLAGS.batch_size))
print("ds_fac: {}".format(FLAGS.ds_fac))
# load config file
config = run_utils.load_config(FLAGS.config)
# Check config setting coherences
assert len(config["level_loss_coefs_params"]) == config["pool_count"], \
"level_loss_coefs_params ({} elements) must have model_res_levels ({}) elements".format(
len(config["level_loss_coefs_params"]), config["pool_count"])
# Find data_dir
data_dir = python_utils.choose_first_existing_path(config["data_dir_candidates"])
if data_dir is None:
print("ERROR: Data directory not found!")
exit()
else:
print("Using data from {}".format(data_dir))
# Setup dataset dirpaths
tfrecords_dirpath_list = [os.path.join(data_dir, tfrecords_dirpath) for tfrecords_dirpath in
config["tfrecords_partial_dirpath_list"]]
# Overwrite config ds_fac if FLAGS specify them
if FLAGS.ds_fac is not None:
ds_fac_list = [FLAGS.ds_fac]
ds_repeat_list = [1]
else:
ds_fac_list = config["ds_fac_list"]
ds_repeat_list = config["ds_repeat_list"]
    # setup init run directory if one is specified:
if FLAGS.init_run_name is not None:
init_run_dirpath = run_utils.setup_run_dir(config["runs_dirname"], FLAGS.init_run_name)
else:
init_run_dirpath = None
# setup run directory:
runs_dir = os.path.join(working_dir, config["runs_dirname"])
current_run_dirpath = run_utils.setup_run_dir(runs_dir, FLAGS.run_name, FLAGS.new_run)
# save config in logs directory
run_utils.save_config(config, current_run_dirpath)
# save FLAGS
FLAGS_filepath = os.path.join(current_run_dirpath, "FLAGS.json")
python_utils.save_json(FLAGS_filepath, {
"run_name": FLAGS.run_name,
"new_run": FLAGS.new_run,
"batch_size": FLAGS.batch_size,
"ds_fac": FLAGS.ds_fac,
})
train(config, tfrecords_dirpath_list, init_run_dirpath, current_run_dirpath, FLAGS.batch_size, ds_fac_list,
ds_repeat_list)
if __name__ == '__main__':
tf.app.run(main=main)
| 9,943 | 44.2 | 146 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/3_test_plot.2_align.py | import sys
import os
import matplotlib.pyplot as plt
import numpy as np
sys.path.append("../../utils")
import python_utils
# --- Params --- #
ACCURACIES_FILENAME_EXTENSION = ".accuracy.npy"
SOURCE_PARAMS_LIST = [
# # --- Stereo real disps --- #
# {
# "name": "Aligned image 1",
# "path": "test.accv2018/stereo_dataset_real_displacements.align.ds_fac_8.ds_fac_4.ds_fac_2.image_ref",
# "plot_color": "royalblue"
# },
# {
# "name": "Aligned image 2",
# "path": "test.accv2018/stereo_dataset_real_displacements.align.ds_fac_8.ds_fac_4.ds_fac_2.image_rec",
# "plot_color": "seagreen"
# },
#
# # --- Stereo real disps no align --- #
#
# {
# "name": "No alignment",
# "path": "test.accv2018/stereo_dataset_real_displacements.noalign",
# "plot_color": "gray"
# },
# --- New/Old training (without/with SanFrancisco in train set) --- #
# {
# "name": "Aligned SanFrancisco After",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.SanFrancisco.new",
# "plot_color": "royalblue"
# },
# {
# "name": "Aligned SanFrancisco Before",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.SanFrancisco.old",
# "plot_color": "green"
# },
#
# {
# "name": "Aligned Norfolk After",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.Norfolk.new",
# "plot_color": "orange"
# },
# {
# "name": "Aligned Norfolk Before",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.Norfolk.old",
# "plot_color": "tomato"
# },
# --- Individual images --- #
# {
# "name": "Aligned SanFrancisco_01",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.SanFrancisco_01",
# "plot_color": "royalblue"
# },
# {
# "name": "Aligned SanFrancisco_02",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.SanFrancisco_02",
# "plot_color": "seagreen"
# },
# {
# "name": "Aligned SanFrancisco_03",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.SanFrancisco_03",
# "plot_color": "tomato"
# },
# {
# "name": "Aligned Norfolk_01 ds_fac=8",
# "path": "test.igarss2019/bradbury_buildings.align.ds_fac_8.Norfolk_01",
# "plot_color": "orange"
# },
# {
# "name": "Aligned Norfolk_01 ds_fac=8,4",
# "path": "test.igarss2019/bradbury_buildings.align.ds_fac_8.ds_fac_4.Norfolk_01",
# "plot_color": "orange"
# },
# {
# "name": "Aligned Norfolk_01 ds_fac=8,4,2",
# "path": "test.igarss2019/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.Norfolk_01",
# "plot_color": "orange"
# },
# {
# "name": "Aligned Norfolk_01 ds_fac=8,4,2,1",
# "path": "test.igarss2019/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.Norfolk_01",
# "plot_color": "orange"
# },
# {
# "name": "Aligned Norfolk_02 ds_fac=8",
# "path": "test.igarss2019/bradbury_buildings.align.ds_fac_8.Norfolk_02",
# "plot_color": "green"
# },
# {
# "name": "Aligned Norfolk_02 ds_fac=8,4",
# "path": "test.igarss2019/bradbury_buildings.align.ds_fac_8.ds_fac_4.Norfolk_02",
# "plot_color": "green"
# },
# {
# "name": "Aligned Norfolk_02 ds_fac=8,4,2",
# "path": "test.igarss2019/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.Norfolk_02",
# "plot_color": "green"
# },
# {
# "name": "Aligned Norfolk_02 ds_fac=8,4,2,1",
# "path": "test.igarss2019/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.Norfolk_02",
# "plot_color": "green"
# },
# {
# "name": "Aligned bellingham21 ds_fac=8",
# "path": "test.igarss2019/inria.align.ds_fac_8.bellingham21",
# "plot_color": "skyblue"
# },
# {
# "name": "Aligned bellingham21 ds_fac=8,4,2,1",
# "path": "test.igarss2019/inria.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1.bellingham21",
# "plot_color": "skyblue"
# },
# {
# "name": "Not aligned SanFrancisco_01",
# "path": "test.accv2018/bradbury_buildings.disp_maps.SanFrancisco_01",
# "plot_color": "royalblue",
# "plot_dashes": (6, 1),
# },
# {
# "name": "Not aligned SanFrancisco_02",
# "path": "test.accv2018/bradbury_buildings.disp_maps.SanFrancisco_02",
# "plot_color": "seagreen",
# "plot_dashes": (6, 1),
# },
# {
# "name": "Not aligned SanFrancisco_03",
# "path": "test.accv2018/bradbury_buildings.disp_maps.SanFrancisco_03",
# "plot_color": "tomato",
# "plot_dashes": (6, 1),
# },
# {
# "name": "Not aligned Norfolk_01",
# "path": "test/bradbury_buildings.no_align_accuracies.Norfolk_01",
# "plot_color": "orange",
# "plot_dashes": (6, 1),
# },
# {
# "name": "Not aligned Norfolk_02",
# "path": "test/bradbury_buildings.no_align_accuracies.Norfolk_02",
# "plot_color": "green",
# "plot_dashes": (6, 1),
# },
# {
# "name": "Not aligned Bellingham21",
# "path": "test/inria.no_align_accuracies.bellingham21",
# "plot_color": "skyblue",
# "plot_dashes": (6, 1),
# },
# --- Ablation studies and comparison --- #
# {
# "name": "No dropping of input polygons",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8_keep_poly_1.ds_fac_4_keep_poly_1.ds_fac_2_keep_poly_1.ds_fac_1_keep_poly_1",
# "plot_color": "tomato"
# },
# {
# "name": "Zampieri et al.",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8_zampieri.ds_fac_4_zampieri.ds_fac_2_zampieri.ds_fac_1_zampieri",
# "plot_color": "black"
# },
# {
# "name": "Full method",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1",
# "plot_color": "royalblue"
# },
# {
# "name": "Full method ds_fac >= 2",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2",
# "plot_color": "orange"
# },
# {
# "name": "Full method ds_fac >= 4",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4",
# "plot_color": "darkorchid"
# },
# {
# "name": "Full method ds_fac = 8",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8",
# "plot_color": "green"
# },
# {
# "name": "No segmentation branch",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8_no_seg.ds_fac_4_no_seg.ds_fac_2_no_seg.ds_fac_1_no_seg",
# "plot_color": "orange"
# },
# {
# "name": "No intermediary losses",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8_no_interm_loss.ds_fac_4_no_interm_loss.ds_fac_2_no_interm_loss.ds_fac_1_no_interm_loss",
# "plot_color": "darkorchid"
# },
# # --- Comparison to Quicksilver --- #
#
# {
# "name": "Our model (scaling = 4)",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_4_disp_max_16",
# "plot_color": "blue"
# },
# {
# "name": "Quicksilver (scaling = 4)",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_4_disp_max_16_quicksilver",
# "plot_color": "seagreen"
# },
#
# # --- Bradbury buildings no align --- #
# {
# "name": "No alignment",
# "path": "test.accv2018/bradbury_buildings.disp_maps",
# "plot_color": "gray"
# },
# # --- Adding the Mapping Challenge (from Crowd AI) dataset --- #
# {
# "name": "ds_fac_8",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8",
# "plot_color": "blue"
# },
# {
# "name": "ds_fac_8_zampieri",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8_zampieri",
# "plot_color": "black"
# },
# {
# "name": "ds_fac_8_bradbury",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8_bradbury",
# "plot_color": "seagreen"
# },
# {
# "name": "ds_fac_8_inria",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8_inria",
# "plot_color": "tomato"
# },
# {
# "name": "ds_fac_8_mapping",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8_mapping",
# "plot_color": "orange"
# },
# {
# "name": "ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1",
# "plot_color": "royalblue"
# },
# --- Cross-dataset generalization --- #
# {
# "name": "Excluding Inria dataset in training",
# "path": "test.accv2018/inria.align.ds_fac_8_no_inria.ds_fac_4_no_inria.ds_fac_2_no_inria.ds_fac_1_no_inria",
# "plot_color": "royalblue"
# },
# {
# "name": "Including training set of Inria dataset",
# "path": "test.accv2018/inria.align.ds_fac_8_no_inria_test.ds_fac_4_no_inria_test.ds_fac_2_no_inria_test.ds_fac_1_no_inria_test",
# "plot_color": "red"
# },
# {
# "name": "Including 2.8% of Inria dataset",
# "path": "test.accv2018/inria.align.ds_fac_8_small_inria.ds_fac_4_small_inria.ds_fac_2_small_inria.ds_fac_1_small_inria",
# "plot_color": "green"
# },
# {
# "name": "Including 2.8% of training set of Inria dataset",
# "path": "test.accv2018/inria.align.ds_fac_8_small_inria_no_test.ds_fac_4_small_inria_no_test.ds_fac_2_small_inria_no_test.ds_fac_1_small_inria_no_test",
# "plot_color": "orange"
# },
# {
# "name": "No alignment",
# "path": "test.accv2018/inria.disp_maps",
# "plot_color": "gray"
# },
# --- Concatenation of intermediary outputs to features passed to the next level --- #
# {
# "name": "Full method",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8.ds_fac_4.ds_fac_2.ds_fac_1",
# "plot_color": "royalblue"
# },
# {
# "name": "Full method with concat interm outputs",
# "path": "test.accv2018/bradbury_buildings.align.ds_fac_8_concat_interm.ds_fac_4_concat_interm.ds_fac_2_concat_interm.ds_fac_1_concat_interm",
# "plot_color": "orange"
# },
# --- Align gt data with noisy supervision --- #
{
"name": "Original",
"path": "test.igarss2019/inria.align_gt.gt_polygons",
"plot_color": "#ff0000"
},
{
"name": "Round 1",
"path": "test.igarss2019/inria.align_gt.aligned_gt_polygons",
"plot_color": "#0000ff"
},
{
"name": "Round 2",
"path": "test.igarss2019/inria.align_gt.aligned_gt_polygons_1",
"plot_color": "#00ff00"
},
{
"name": "Round 3",
"path": "test.igarss2019/inria.align_gt.aligned_gt_polygons_2",
"plot_color": "#999999",
},
{
"name": "AS1: round 2",
"path": "test.igarss2019/inria.align_gt.aligned_gt_polygons_1_prev_aligned",
"plot_color": "#00bb00",
"linewidth": 0.5,
"marker": "+",
},
{
"name": "AS2: round 2",
"path": "test.igarss2019/inria.align_gt.aligned_gt_polygons_1_prev_aligned_no_retraining",
"plot_color": "#00bb00",
"linewidth": 0.5,
"marker": "x",
},
{
"name": "Noisier: original",
"path": "test.igarss2019/inria.align_gt.noisy_gt_polygons",
"plot_color": "#ff0000",
"plot_dashes": (6, 3),
},
{
"name": "Noisier: round 1",
"path": "test.igarss2019/inria.align_gt.aligned_noisy_gt_polygons",
"plot_color": "#0000ff",
"plot_dashes": (6, 3),
},
{
"name": "Noisier: round 2",
"path": "test.igarss2019/inria.align_gt.aligned_noisy_gt_polygons_1",
"plot_color": "#00ff00",
"plot_dashes": (6, 3),
},
{
"name": "Noisier: round 3",
"path": "test.igarss2019/inria.align_gt.aligned_noisy_gt_polygons_2",
"plot_color": "#999999",
"plot_dashes": (6, 3),
},
]
PLOT_ALL = False
PLOT_MIN_MAX = False
PLOT_AVERAGE = True
PLOT_STD = False
ALPHA_MAIN = 1.0
ALPHA_MIN_MAX = 0.5
ALPHA_STD = 0.125 / 2
ALPHA_INDIVIDUAL = 0.2 # Default: 0.2
COLOR = 'cornflowerblue'
X_LIM = 32 # Default: 12
FILEPATH = "test.igarss2019/accuracies.png"
# --- --- #
def main():
plt.figure(1, figsize=(7, 4))
handles = []
for source_params in SOURCE_PARAMS_LIST:
print("# --- {} --- #".format(source_params["name"]))
if "plot_dashes" in source_params:
plot_dashes = source_params["plot_dashes"]
else:
plot_dashes = (None, None)
if "linewidth" in source_params:
linewidth = source_params["linewidth"]
else:
linewidth = 1.5
if "marker" in source_params:
marker = source_params["marker"]
else:
marker = None
threshold_accuracies_filepath_list = python_utils.get_filepaths(source_params["path"], ACCURACIES_FILENAME_EXTENSION)
threshold_accuracies_list = []
for threshold_accuracies_filepath in threshold_accuracies_filepath_list:
threshold_accuracies = np.load(threshold_accuracies_filepath).item()
threshold_accuracies_list.append(threshold_accuracies)
# Plot main, min and max curves
accuracies_list = []
for threshold_accuracies in threshold_accuracies_list:
accuracies_list.append(threshold_accuracies["accuracies"])
accuracies_table = np.stack(accuracies_list, axis=0)
accuracies_min = np.min(accuracies_table, axis=0)
accuracies_average = np.mean(accuracies_table, axis=0)
accuracies_max = np.max(accuracies_table, axis=0)
accuracies_std = np.std(accuracies_table, axis=0)
accuracies_average_area = np.trapz(accuracies_average, threshold_accuracies_list[0]["thresholds"])
if PLOT_AVERAGE:
markers_on = range(0, len(accuracies_average), 4)
plt.plot(threshold_accuracies_list[0]["thresholds"], accuracies_average, color=source_params["plot_color"],
linewidth=linewidth, marker=marker, markevery=markers_on, dashes=plot_dashes, alpha=ALPHA_MAIN, label=source_params["name"])
print("Area under average curve = {}".format(accuracies_average_area))
if PLOT_MIN_MAX:
plt.plot(threshold_accuracies_list[0]["thresholds"], accuracies_min, color=source_params["plot_color"], dashes=(6, 1), alpha=ALPHA_MIN_MAX, label=source_params["name"])
plt.plot(threshold_accuracies_list[0]["thresholds"], accuracies_max, color=source_params["plot_color"], dashes=(6, 1), alpha=ALPHA_MIN_MAX, label=source_params["name"])
if PLOT_STD:
plt.fill_between(threshold_accuracies_list[0]["thresholds"], accuracies_average - accuracies_std, accuracies_average + accuracies_std,
color=source_params["plot_color"], alpha=ALPHA_STD, label=source_params["name"])
# plt.plot(threshold_accuracies_list[0]["thresholds"], accuracies_std, color=source_params["plot_color"],
# dashes=(6, 1), alpha=ALPHA_STD, label=source_params["name"])
if PLOT_ALL:
# Plot all curves:
for threshold_accuracies in threshold_accuracies_list:
plt.plot(threshold_accuracies["thresholds"], threshold_accuracies["accuracies"],
color=source_params["plot_color"], dashes=plot_dashes, alpha=ALPHA_INDIVIDUAL, label=source_params["name"])
# Legend
handles.append(plt.Line2D([0], [0], color=source_params["plot_color"], linewidth=linewidth, marker=marker, dashes=plot_dashes))
plt.grid(True)
axes = plt.gca()
axes.set_xlim([0, X_LIM])
axes.set_ylim([0.0, 1.0])
# plt.title("Fraction of vertices whose ground truth point distance is less than the threshold (higher is better)")
plt.xlabel('Threshold $\\tau$ (in pixels)')
plt.ylabel('Fraction of vertices')
# Add legends in top-left
labels = [source_params["name"] for source_params in SOURCE_PARAMS_LIST]
plt.legend(handles, labels, numpoints=None)
# Plot
plt.tight_layout()
plt.savefig(FILEPATH, dpi=300)
plt.show()
if __name__ == '__main__':
main()
| 16,830 | 35.430736 | 180 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/2_test_stereo.py | import sys
import os
import numpy as np
import config
import test
sys.path.append("../../../data/stereo_dataset")
import read
# --- Params --- #
DATASET_DIR = os.path.join(config.PROJECT_DIR, "../../../data/stereo_dataset")
FILE_PARAMS = {
"raw_dataset_dir": os.path.join(DATASET_DIR, "raw"),
"gt_views": ["rec", "ref"],
"image_name_suffix": "ortho",
"image_modes": ["RGB", "NIRRG"],
"image_extension": "tif",
"image_format": "{}_{}_{}_{}.{}", # To be used as IMAGE_FORMAT.format(name, image_name_suffix, gt_views[i], image_modes[j], image_extension)
"poly_name_capitalize": True, # If True, the gt name will be capitalised when building the gt filename to load
"poly_tag": "buildings",
"poly_extension": "shp",
"poly_format": "{}_{}_{}.{}", # To be used as IMAGE_FORMAT.format(capitalize(name), POLY_TAG, GT_VIEWS[i], IMAGE_EXTENSION)
}
TEST_IMAGES = ["leibnitz"]
# Displacement map
DISP_MAP_PARAMS = {
"disp_map_count": 1,
"disp_modes": 30, # Number of Gaussians mixed up to make the displacement map (Default: 20)
"disp_gauss_mu_range": [0, 1], # Coordinates are normalized to [0, 1] before the function is applied
"disp_gauss_sig_scaling": [0.0, 0.002], # Coordinates are normalized to [0, 1] before the function is applied
"disp_max_abs_value": 32,
}
# Models
BATCH_SIZE = 32
DS_FAC_LIST = [8, 4, 2] # Must be in descending order
# DS_FAC_LIST = [8, 4]
RUN_NAME_LIST = [
# "ds_fac_16",
"ds_fac_8_double",
"ds_fac_4_double",
"ds_fac_2_double_seg",
# "ds_fac_1_double_seg",
]
assert len(DS_FAC_LIST) == len(RUN_NAME_LIST), "DS_FAC_LIST and RUN_NAME_LIST should have the same length (and match)"
MODEL_DISP_MAX_ABS_VALUE = 4
# Both lists should match and be in descending order of downsampling factor.
THRESHOLDS = np.arange(0, 16.5, 0.5)
TEST_OUTPUT_DIR = "test/stereo_dataset.ds_fac_8_double.ds_fac_4_double.ds_fac_2_double_seg.ds_fac_1_double_seg"
# --- --- #
def test_image(image_name, view, file_params, disp_map_params, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value, thresholds, test_output_dir):
# --- Load data --- #
ori_image, ori_metadata = read.load_image_data(image_name, view, file_params)
ori_gt_polygons = read.load_polygon_data(image_name, view, file_params)
# --- Test --- #
# Add view to the image name (otherwise the result of the last view will overwrite previous ones)
test_image_name = image_name + "_" + view
test.test_image_with_gt_polygons(test_image_name, ori_image, ori_metadata, ori_gt_polygons, disp_map_params, batch_size, ds_fac_list, run_name_list, model_disp_max_abs_value, thresholds, test_output_dir)
def main():
if not os.path.exists(TEST_OUTPUT_DIR):
os.makedirs(TEST_OUTPUT_DIR)
if not os.path.exists(TEST_OUTPUT_DIR + ".no_align"):
os.makedirs(TEST_OUTPUT_DIR + ".no_align")
for image_name in TEST_IMAGES:
for view in FILE_PARAMS["gt_views"]:
test_image(image_name, view, FILE_PARAMS, DISP_MAP_PARAMS, BATCH_SIZE, DS_FAC_LIST, RUN_NAME_LIST, MODEL_DISP_MAX_ABS_VALUE, THRESHOLDS, TEST_OUTPUT_DIR)
if __name__ == '__main__':
main()
| 3,177 | 35.953488 | 207 | py |
mapalignment | mapalignment-master/projects/mapalign/mapalign_multires/2_test_aerial_image.align_gt.py | import sys
import os
import tensorflow as tf
import numpy as np
import test
# CHANGE to the path of your own read.py script:
sys.path.append("../../../data/AerialImageDataset")
import read
sys.path.append("../../utils")
import run_utils
import python_utils
# --- Command-line FLAGS --- #
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('batch_size', None, "Batch size. Generally set as large as the VRAM can handle.")
# Some examples:
# On Quadro M2200, 4GB VRAM: python 2_test_aerial_image.align_gt.py --batch_size=12
# On GTX 1080 Ti, 11GB VRAM: python 2_test_aerial_image.align_gt.py --batch_size=32
# --- --- #
# --- Params --- #
# CHANGE to your own test config file:
TEST_CONFIG_NAME = "config.test.aerial_image.align_gt"
# Must be in descending order:
DS_FAC_LIST = [
8,
4,
2,
1,
]
RUNS_DIRPATH = "runs.igarss2019"
RUN_NAME_LIST = ["ds_fac_{}_noisy_inria_bradbury_all_2".format(ds_fac) for ds_fac in DS_FAC_LIST]
OUTPUT_DIRNAME_EXTENTION = "." + ".".join(RUN_NAME_LIST)
INPUT_POLYGONS_DIRNAME = "noisy_gt_polygons" # Set to None to use default gt polygons
ALIGNED_GT_POLYGONS_DIRNAME = "aligned_noisy_gt_polygons_2"
# --- --- #
def test_image(runs_dirpath, dataset_raw_dirpath, image_info, batch_size, ds_fac_list, run_name_list,
model_disp_max_abs_value, output_dir, output_shapefiles):
# --- Load data --- #
# CHANGE the arguments of the load_gt_data() function if using your own and it does not take the same arguments:
ori_image, ori_metadata, ori_gt_polygons = read.load_gt_data(dataset_raw_dirpath, image_info["city"],
image_info["number"])
if INPUT_POLYGONS_DIRNAME is not None:
gt_polygons = read.load_polygons(dataset_raw_dirpath, INPUT_POLYGONS_DIRNAME, image_info["city"], image_info["number"])
else:
gt_polygons = ori_gt_polygons
if gt_polygons is not None:
# CHANGE the arguments of the IMAGE_NAME_FORMAT format string if using your own and it does not take the same arguments:
image_name = read.IMAGE_NAME_FORMAT.format(city=image_info["city"], number=image_info["number"])
aligned_gt_polygons = test.test_align_gt(runs_dirpath, ori_image, ori_metadata, gt_polygons, batch_size,
ds_fac_list, run_name_list,
model_disp_max_abs_value, output_dir, image_name,
output_shapefiles=output_shapefiles)
# Save aligned_gt_polygons in dataset dir:
aligned_gt_polygons_filepath = read.get_polygons_filepath(dataset_raw_dirpath, ALIGNED_GT_POLYGONS_DIRNAME, image_info["city"], image_info["number"])
os.makedirs(os.path.dirname(aligned_gt_polygons_filepath), exist_ok=True)
np.save(aligned_gt_polygons_filepath, aligned_gt_polygons)
def main(_):
# load config file
config_test = run_utils.load_config(TEST_CONFIG_NAME)
# Handle FLAGS
if FLAGS.batch_size is not None:
batch_size = FLAGS.batch_size
else:
batch_size = config_test["batch_size"]
print("#--- Used params: ---#")
print("batch_size: {}".format(FLAGS.batch_size))
# Find data_dir
data_dir = python_utils.choose_first_existing_path(config_test["data_dir_candidates"])
if data_dir is None:
print("ERROR: Data directory not found!")
exit()
else:
print("Using data from {}".format(data_dir))
dataset_raw_dirpath = os.path.join(data_dir, config_test["dataset_raw_partial_dirpath"])
output_dir = config_test["align_dir"] + OUTPUT_DIRNAME_EXTENTION
if not os.path.exists(output_dir):
os.makedirs(output_dir)
for images_info in config_test["images_info_list"]:
for number in images_info["numbers"]:
image_info = {
"city": images_info["city"],
"number": number,
}
test_image(RUNS_DIRPATH, dataset_raw_dirpath, image_info, batch_size, DS_FAC_LIST,
RUN_NAME_LIST, config_test["model_disp_max_abs_value"],
output_dir, config_test["output_shapefiles"])
if __name__ == '__main__':
tf.app.run(main=main)
| 4,273 | 34.915966 | 157 | py |
mapalignment | mapalignment-master/projects/utils/tf_utils.py | import tensorflow as tf
from tensorflow.python.framework.ops import get_gradient_function
import math
import numpy as np
def get_tf_version():
return tf.__version__
def bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def compute_current_adam_lr(optimizer):
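    # Adam's bias-corrected step size at iteration t is lr * sqrt(1 - beta2^t) / (1 - beta1^t); the
    # commented lines below computed it from the optimizer's internal power accumulators, whereas
    # returning _lr only reports the base learning rate.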
# print(get_tf_version())
# a0, bb1, bb2 = optimizer._lr, optimizer._beta1_power, optimizer._beta2_power
# at = a0 * (1 - bb2) ** 0.5 / (1 - bb1)
# return at
return optimizer._lr # TODO: verify if this works
def count_number_trainable_params(trainable_variables=None):
"""
Counts the number of trainable variables.
"""
if trainable_variables is None:
trainable_variables = tf.trainable_variables()
tot_nb_params = 0
for trainable_variable in trainable_variables:
shape = trainable_variable.get_shape() # e.g [D,F] or [W,H,C]
current_nb_params = get_nb_params_shape(shape)
tot_nb_params = tot_nb_params + current_nb_params
return tot_nb_params
def get_nb_params_shape(shape):
"""
Computes the total number of params for a given shape.
Works for any number of shapes etc [D,F] or [W,H,C] computes D*F and W*H*C.
"""
nb_params = 1
for dim in shape:
nb_params = nb_params * int(dim)
return nb_params
def conv2d(x, W, stride=1, padding="SAME"):
"""conv2d returns a 2d convolution layer."""
return tf.nn.conv2d(x, W, strides=[1, stride, stride, 1], padding=padding)
def complete_conv2d(input_tensor, output_channels, kernel_size, stride=1, padding="SAME", activation=tf.nn.relu, bias_init_value=0.025,
std_factor=1, weight_decay=None, summary=False):
input_channels = input_tensor.get_shape().as_list()[-1]
output_channels = int(output_channels)
with tf.name_scope('W'):
w_conv = weight_variable([kernel_size[0], kernel_size[1], input_channels, output_channels], std_factor=std_factor, wd=weight_decay)
if summary:
variable_summaries(w_conv)
with tf.name_scope('bias'):
b_conv = bias_variable([output_channels], init_value=bias_init_value)
if summary:
variable_summaries(b_conv)
z_conv = conv2d(input_tensor, w_conv, stride=stride, padding=padding) + b_conv
if summary:
tf.summary.histogram('pre_activations', z_conv)
if activation is not None:
h_conv = activation(z_conv)
else:
h_conv = z_conv
if summary:
tf.summary.histogram('activations', h_conv)
return h_conv
def conv2d_transpose(x, W, output_shape, stride=1, padding="SAME"):
"""conv2d_transpose returns a 2d transpose convolution layer."""
return tf.nn.conv2d_transpose(x, W, output_shape, strides=[1, stride, stride, 1], padding=padding)
def complete_conv2d_transpose(input_tensor, output_channels, output_size, kernel_size, stride=1, padding="SAME", activation=tf.nn.relu,
bias_init_value=0.025, std_factor=1, weight_decay=None, summary=False):
batch_size = input_tensor.get_shape().as_list()[0]
input_channels = input_tensor.get_shape().as_list()[-1]
output_channels = int(output_channels)
with tf.name_scope('W'):
w_conv = weight_variable([kernel_size[0], kernel_size[1], output_channels, input_channels], std_factor=std_factor, wd=weight_decay)
if summary:
variable_summaries(w_conv)
with tf.name_scope('bias'):
b_conv = bias_variable([output_channels], init_value=bias_init_value)
if summary:
variable_summaries(b_conv)
z_conv = conv2d_transpose(input_tensor, w_conv, [batch_size, output_size[0], output_size[1], output_channels], stride=stride, padding=padding) + b_conv
if summary:
tf.summary.histogram('pre_activations', z_conv)
h_conv = activation(z_conv)
if summary:
tf.summary.histogram('activations', h_conv)
return h_conv
def complete_fc(input_tensor, output_channels, bias_init_value=0.025, weight_decay=None, activation=tf.nn.relu, summary=False):
batch_size = input_tensor.get_shape().as_list()[0]
net = tf.reshape(input_tensor, (batch_size, -1))
input_channels = net.get_shape().as_list()[-1]
with tf.name_scope('W'):
w_fc = weight_variable([input_channels, output_channels], wd=weight_decay)
if summary:
variable_summaries(w_fc)
with tf.name_scope('bias'):
b_fc = bias_variable([output_channels], init_value=bias_init_value)
if summary:
variable_summaries(b_fc)
z_fc = tf.matmul(net, w_fc) + b_fc
if summary:
tf.summary.histogram('pre_activations', z_fc)
h_fc = activation(z_fc)
if summary:
tf.summary.histogram('activations', h_fc)
return h_fc
def max_pool_2x2(x):
"""max_pool_2x2 downsamples a feature map by 2X."""
return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1], padding='SAME')
def weight_variable(shape, std_factor=1, wd=None):
"""weight_variable generates a weight variable of a given shape. Adds weight decay if specified"""
# Initialize using Xavier initializer
fan_in = 100
fan_out = 100
if len(shape) == 4:
fan_in = shape[0] * shape[1] * shape[2]
fan_out = shape[3]
elif len(shape) == 2:
fan_in = shape[0]
fan_out = shape[1]
else:
print("WARNING: This shape format is not handled! len(shape) = {}".format(len(shape)))
stddev = std_factor * math.sqrt(2 / (fan_in + fan_out))
    initial = tf.truncated_normal(shape, stddev=stddev)
    var = tf.Variable(initial)
    if wd is not None:
        # Apply the decay penalty to the variable itself: penalizing the constant initializer
        # tensor would add a term with zero gradient w.r.t. the trainable weights (a no-op).
        weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')
        tf.add_to_collection('losses', weight_decay)
    return var
def bias_variable(shape, init_value=0.025):
"""bias_variable generates a bias variable of a given shape."""
initial = tf.constant(init_value, shape=shape)
return tf.Variable(initial)
def parametric_relu(_x):
alphas = tf.get_variable('alpha', _x.get_shape()[-1],
initializer=tf.constant_initializer(0.0),
dtype=tf.float32)
pos = tf.nn.relu(_x)
neg = alphas * (_x - abs(_x)) * 0.5
return pos + neg
def variable_summaries(var):
"""Attach a lot of summaries to a Tensor (for TensorBoard visualization)."""
# with tf.name_scope('summaries'):
mean = tf.reduce_mean(var)
tf.summary.scalar('mean', mean)
# with tf.name_scope('stddev'):
stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
tf.summary.scalar('stddev', stddev)
tf.summary.scalar('max', tf.reduce_max(var))
tf.summary.scalar('min', tf.reduce_min(var))
tf.summary.histogram('histogram', var)
def make_depthwise_kernel(a, in_channels):
"""Transform a 2D array into a convolution kernel"""
a = np.asarray(a)
a = a.reshape(list(a.shape) + [1, 1])
a = tf.constant(a, dtype=tf.float32)
a = tf.tile(a, [1, 1, in_channels, 1])
return a
def dilate(image, filter_size=2):
rank = len(image.get_shape())
if rank == 3:
image = tf.expand_dims(image, axis=0) # Add batch dim
depth = image.get_shape().as_list()[-1]
    filter = np.zeros((filter_size, filter_size, depth))  # A zero filter works because tf.nn.dilation2d takes the max of (input + filter) over each window, so zeros yield a plain running max (flat structuring element)
output = tf.nn.dilation2d(image, filter, strides=[1, 1, 1, 1], rates=[1, 1, 1, 1], padding="SAME", name='dilation2d')
if rank == 3:
return output[0]
else:
return output
# rank = len(input.get_shape())
# channels = input.get_shape().as_list()[-1]
# kernel_size = 2*radius + 1
# kernel_array = np.ones((kernel_size, kernel_size)) / (kernel_size*kernel_size)
# kernel = make_depthwise_kernel(kernel_array, channels)
# if rank == 3:
# input = tf.expand_dims(input, axis=0) # Add batch dim
# output = tf.nn.depthwise_conv2d(input, kernel, [1, 1, 1, 1], padding='SAME')
# if rank == 3:
# return output[0]
# else:
# return output
def gaussian_blur(image, filter_size, mean, std):
def make_gaussian_kernel(size: int,
mean: float,
std: float,
):
"""Makes 2D gaussian Kernel for convolution."""
mean = float(mean)
std= float(std)
d = tf.distributions.Normal(mean, std)
vals = d.prob(tf.range(start=-size, limit=size + 1, dtype=tf.float32))
gauss_kernel = tf.einsum('i,j->ij',
vals,
vals)
return gauss_kernel / tf.reduce_sum(gauss_kernel)
gauss_kernel = make_gaussian_kernel(filter_size, mean, std)
gauss_kernel = gauss_kernel[:, :, tf.newaxis, tf.newaxis]
image_blurred = tf.nn.conv2d(image, gauss_kernel, strides=[1, 1, 1, 1], padding="SAME")
return image_blurred
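# Note: gaussian_blur expects a 4-D single-channel tensor since the kernel is built with shape
# [2*filter_size+1, 2*filter_size+1, 1, 1]; a minimal usage sketch (hypothetical names):
#   blurred = gaussian_blur(tf.expand_dims(gray_batch, axis=-1), filter_size=3, mean=0.0, std=1.0)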
def create_array_to_feed_placeholder(placeholder):
shape = placeholder.get_shape().as_list()
shape_removed_none = []
for dim in shape:
if dim is not None:
shape_removed_none.append(dim)
else:
shape_removed_none.append(0)
return np.empty(shape_removed_none)
| 9,356 | 35.694118 | 155 | py |
mapalignment | mapalignment-master/projects/utils/viz_utils.py | import sys
import numpy as np
sys.path.append("../../utils")
import polygon_utils
import skimage.io
import cv2
def save_plot_image_polygon(filepath, image, polygons):
spatial_shape = image.shape[:2]
polygons_map = polygon_utils.draw_polygon_map(polygons, spatial_shape, fill=False, edges=True,
vertices=False, line_width=1)
output_image = image[:, :, :3] # Keep first 3 channels
output_image = output_image.astype(np.float64)
output_image[np.where(0 < polygons_map[:, :, 0])] = np.array([0, 0, 255])
# output_image = np.clip(output_image, 0, 255)
output_image = output_image.astype(np.uint8)
skimage.io.imsave(filepath, output_image)
def save_plot_segmentation_image(filepath, segmentation_image):
output_image = np.zeros((segmentation_image.shape[0], segmentation_image.shape[1], 4))
output_image[:, :, :3] = segmentation_image[:, :, 1:4] # Remove background channel
output_image[:, :, 3] = np.sum(segmentation_image[:, :, 1:4], axis=-1) # Add alpha
output_image = output_image * 255
output_image = np.clip(output_image, 0, 255)
output_image = output_image.astype(np.uint8)
skimage.io.imsave(filepath, output_image)
def flow_to_image(flow):
mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
hsv = np.zeros((flow.shape[0], flow.shape[1], 3))
hsv[..., 0] = ang * 180 / np.pi / 2
hsv[..., 1] = 255
hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
hsv = hsv.astype(np.uint8)
rgb = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
return rgb
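# Hedged sketch (sizes are illustrative assumptions, not from the original code):
# visualize a random synthetic flow field; the hue encodes flow direction and the
# value encodes flow magnitude.
def _flow_to_image_example():
    flow = np.random.randn(64, 64, 2).astype(np.float32)
    return flow_to_image(flow)  # uint8 color image of shape (64, 64, 3)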
| 1,610 | 34.021739 | 98 | py |
mapalignment | mapalignment-master/projects/utils/dataset_utils.py | import os
import tensorflow as tf
class TFRecordShardWriter:
def __init__(self, filepath_format, max_records_per_shard):
self.filepath_format = filepath_format
self.max_records_per_shard = max_records_per_shard
self.current_shard_record_count = 0 # To know when to switch to a new file
self.current_shard_count = 0 # To know how to name the record file
self.writer = None
self.create_new_shard_writer()
def create_new_shard_writer(self):
filename = self.filepath_format.format(self.current_shard_count)
os.makedirs(os.path.dirname(filename), exist_ok=True)
self.writer = tf.python_io.TFRecordWriter(filename)
self.current_shard_count += 1
def write(self, serialized_example):
self.current_shard_record_count += 1
if self.max_records_per_shard < self.current_shard_record_count:
self.create_new_shard_writer()
self.current_shard_record_count = 1
self.writer.write(serialized_example)
def close(self):
self.writer.close()
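# Hedged usage sketch (filepath format and shard size are illustrative assumptions):
# write already-serialized tf.train.Example protos into shards of at most 1000 records.
def _tfrecord_shard_writer_example(serialized_examples):
    writer = TFRecordShardWriter("dataset/shard_{:06d}.tfrecord", max_records_per_shard=1000)
    for serialized_example in serialized_examples:
        writer.write(serialized_example)
    writer.close()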
| 1,079 | 36.241379 | 83 | py |
mapalignment | mapalignment-master/projects/utils/python_utils.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import errno
import json
from jsmin import jsmin
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
def choose_first_existing_path(path_list):
for path in path_list:
if os.path.exists(os.path.expanduser(path)):
return path
return None
def get_display_availability():
return "DISPLAY" in os.environ
def get_filepaths(dir_path, endswith_str="", startswith_str=""):
if os.path.isdir(dir_path):
image_filepaths = []
for path, dnames, fnames in os.walk(dir_path):
fnames = sorted(fnames)
image_filepaths.extend([os.path.join(path, x) for x in fnames if x.endswith(endswith_str) and x.startswith(startswith_str)])
return image_filepaths
else:
raise NotADirectoryError(errno.ENOENT, os.strerror(errno.ENOENT), dir_path)
def get_dir_list_filepaths(dir_path_list, endswith_str="", startswith_str=""):
image_filepaths = []
for dir_path in dir_path_list:
image_filepaths.extend(get_filepaths(dir_path, endswith_str=endswith_str, startswith_str=startswith_str))
return image_filepaths
def save_json(filepath, data):
dirpath = os.path.dirname(filepath)
os.makedirs(dirpath, exist_ok=True)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
return True
def load_json(filepath):
try:
with open(filepath, 'r') as f:
minified = jsmin(f.read())
data = json.loads(minified)
return data
except FileNotFoundError:
return False
def wipe_dir(dirpath):
filepaths = get_filepaths(dirpath)
for filepath in filepaths:
os.remove(filepath)
def split_list_into_chunks(l, n, pad=False):
"""Yield successive n-sized chunks from l."""
for i in range(0, len(l), n):
if pad:
chunk = l[i:i + n]
if len(chunk) < n:
chunk.extend([chunk[-1]]*(n - len(chunk)))
yield chunk
else:
yield l[i:i + n]
def params_to_str(params):
def to_str(value):
if type(value) == float and value == int(value):
return str(int(value))
return str(value)
return "_".join(["{}_{}".format(key, to_str(params[key])) for key in sorted(params.keys())])
def main():
l = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
batches = split_list_into_chunks(l, 4, pad=True)
for batch in batches:
print(batch)
if __name__ == '__main__':
main()
| 2,600 | 24.5 | 136 | py |
mapalignment | mapalignment-master/projects/utils/image_utils.py | from io import BytesIO
import math
import numpy as np
from PIL import Image
import skimage.draw
import python_utils
CV2 = False
if python_utils.module_exists("cv2"):
import cv2
CV2 = True
if python_utils.module_exists("matplotlib.pyplot"):
import matplotlib.pyplot as plt
def get_image_size(filepath):
im = Image.open(filepath)
return im.size
def load_image(image_filepath):
image = Image.open(image_filepath)
image.load()
image_array = np.array(image, dtype=np.uint8)
image.close()
return image_array
def padded_boundingbox(boundingbox, padding):
boundingbox_new = np.empty_like(boundingbox)
boundingbox_new[0:2] = boundingbox[0:2] + padding
boundingbox_new[2:4] = boundingbox[2:4] - padding
return boundingbox_new
def center_bbox(spatial_shape, output_shape):
"""
Return a bbox centered in spatial_shape with size output_shape
:param spatial_shape:
:param output_shape:
:return:
"""
center = (spatial_shape[0] / 2, spatial_shape[1] / 2)
half_output_shape = (output_shape[0] / 2, output_shape[1] / 2)
bbox = [center[0] - half_output_shape[0], center[1] - half_output_shape[1], center[0] + half_output_shape[0], center[1] + half_output_shape[1]]
bbox = bbox_to_int(bbox)
return bbox
def bbox_add_margin(bbox, margin):
bbox_new = bbox.copy()
bbox_new[0:2] -= margin
bbox_new[2:4] += margin
return bbox_new
def bbox_to_int(bbox):
bbox_new = [
int(np.floor(bbox[0])),
int(np.floor(bbox[1])),
int(np.ceil(bbox[2])),
int(np.ceil(bbox[3])),
]
return bbox_new
def draw_line_aa_in_patch(edge, patch_bounds):
rr, cc, prob = skimage.draw.line_aa(edge[0][0], edge[0][1], edge[1][0], edge[1][1])
keep_mask = (patch_bounds[0] <= rr) & (rr < patch_bounds[2]) \
& (patch_bounds[1] <= cc) & (cc < patch_bounds[3])
rr = rr[keep_mask]
cc = cc[keep_mask]
prob = prob[keep_mask]
return rr, cc, prob
def convert_array_to_jpg_bytes(image_array, mode=None):
img = Image.fromarray(image_array, mode=mode)
output = BytesIO()
img.save(output, format="JPEG", quality=90)
contents = output.getvalue()
output.close()
return contents
def displacement_map_to_transformation_maps(disp_field_map):
disp_field_map = disp_field_map.astype(np.float32)
i = np.arange(disp_field_map.shape[0], dtype=np.float32)
j = np.arange(disp_field_map.shape[1], dtype=np.float32)
iv, jv = np.meshgrid(i, j, indexing="ij")
reverse_map_i = iv + disp_field_map[:, :, 1]
reverse_map_j = jv + disp_field_map[:, :, 0]
return reverse_map_i, reverse_map_j
if CV2:
def apply_displacement_field_to_image(image, disp_field_map):
trans_map_i, trans_map_j = displacement_map_to_transformation_maps(disp_field_map)
misaligned_image = cv2.remap(image, trans_map_j, trans_map_i, cv2.INTER_CUBIC)
return misaligned_image
def apply_displacement_fields_to_image(image, disp_field_maps):
disp_field_map_count = disp_field_maps.shape[0]
misaligned_image_list = []
for i in range(disp_field_map_count):
misaligned_image = apply_displacement_field_to_image(image, disp_field_maps[i, :, :, :])
misaligned_image_list.append(misaligned_image)
return misaligned_image_list
else:
    def apply_displacement_field_to_image(image, disp_field_map):
        print("cv2 is not available, the apply_displacement_field_to_image(image, disp_field_map) function cannot work!")
def apply_displacement_fields_to_image(image, disp_field_maps):
print("cv2 is not available, the apply_displacement_fields_to_image(image, disp_field_maps) function cannot work!")
def get_axis_patch_count(length, stride, patch_res):
total_double_padding = patch_res - stride
patch_count = max(1, int(math.ceil((length - total_double_padding) / stride)))
return patch_count
def compute_patch_boundingboxes(image_size, stride, patch_res):
im_rows = image_size[0]
im_cols = image_size[1]
row_patch_count = get_axis_patch_count(im_rows, stride, patch_res)
col_patch_count = get_axis_patch_count(im_cols, stride, patch_res)
patch_boundingboxes = []
for i in range(0, row_patch_count):
if i < row_patch_count - 1:
row_slice_begin = i * stride
row_slice_end = row_slice_begin + patch_res
else:
row_slice_end = im_rows
row_slice_begin = row_slice_end - patch_res
for j in range(0, col_patch_count):
if j < col_patch_count - 1:
col_slice_begin = j*stride
col_slice_end = col_slice_begin + patch_res
else:
col_slice_end = im_cols
col_slice_begin = col_slice_end - patch_res
patch_boundingbox = np.array([row_slice_begin, col_slice_begin, row_slice_end, col_slice_end], dtype=np.int)
assert row_slice_end - row_slice_begin == col_slice_end - col_slice_begin == patch_res, "ERROR: patch does not have the requested shape"
patch_boundingboxes.append(patch_boundingbox)
return patch_boundingboxes
def clip_boundingbox(boundingbox, clip_list):
assert len(boundingbox) == len(clip_list), "len(boundingbox) should be equal to len(clip_values)"
clipped_boundingbox = []
for bb_value, clip in zip(boundingbox[:2], clip_list[:2]):
clipped_value = max(clip, bb_value)
clipped_boundingbox.append(clipped_value)
for bb_value, clip in zip(boundingbox[2:], clip_list[2:]):
clipped_value = min(clip, bb_value)
clipped_boundingbox.append(clipped_value)
return clipped_boundingbox
def crop_or_pad_image_with_boundingbox(image, patch_boundingbox):
im_rows = image.shape[0]
im_cols = image.shape[1]
row_padding_before = max(0, - patch_boundingbox[0])
col_padding_before = max(0, - patch_boundingbox[1])
row_padding_after = max(0, patch_boundingbox[2] - im_rows)
col_padding_after = max(0, patch_boundingbox[3] - im_cols)
# Center padding:
row_padding = row_padding_before + row_padding_after
col_padding = col_padding_before + col_padding_after
row_padding_before = row_padding // 2
col_padding_before = col_padding // 2
row_padding_after = row_padding - row_padding // 2
col_padding_after = col_padding - col_padding // 2
clipped_patch_boundingbox = clip_boundingbox(patch_boundingbox, [0, 0, im_rows, im_cols])
if len(image.shape) == 2:
patch = image[clipped_patch_boundingbox[0]:clipped_patch_boundingbox[2], clipped_patch_boundingbox[1]:clipped_patch_boundingbox[3]]
patch = np.pad(patch, [(row_padding_before, row_padding_after), (col_padding_before, col_padding_after)], mode="constant")
elif len(image.shape) == 3:
patch = image[clipped_patch_boundingbox[0]:clipped_patch_boundingbox[2], clipped_patch_boundingbox[1]:clipped_patch_boundingbox[3], :]
patch = np.pad(patch, [(row_padding_before, row_padding_after), (col_padding_before, col_padding_after), (0, 0)], mode="constant")
else:
print("Image input does not have the right shape/")
patch = None
return patch
def make_grid(images, padding=2, pad_value=0):
nmaps = images.shape[0]
ymaps = int(math.floor(math.sqrt(nmaps)))
xmaps = nmaps // ymaps
height, width = int(images.shape[1] + padding), int(images.shape[2] + padding)
grid = np.zeros((height * ymaps + padding, width * xmaps + padding, images.shape[3])) + pad_value
k = 0
for y in range(ymaps):
for x in range(xmaps):
if k >= nmaps:
break
grid[y * height + padding:(y+1) * height, x * width + padding:(x+1) * width, :] = images[k]
k = k + 1
return grid
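# Minimal sketch (assumed sizes, not from the original tests): tile a batch of 9
# RGB patches into a 3x3 grid image with a 2-pixel white border between patches.
def _make_grid_example():
    images = np.random.rand(9, 64, 64, 3)
    return make_grid(images, padding=2, pad_value=1.0)  # shape (200, 200, 3)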
if __name__ == "__main__":
im_rows = 5
im_cols = 10
stride = 1
patch_res = 15
image = np.random.randint(0, 256, size=(im_rows, im_cols, 3), dtype=np.uint8)
image = Image.fromarray(image)
image = np.array(image)
plt.ion()
plt.figure(1)
plt.imshow(image)
plt.show()
# Cut patches
patch_boundingboxes = compute_patch_boundingboxes(image.shape[0:2], stride, patch_res)
plt.figure(2)
for patch_boundingbox in patch_boundingboxes:
patch = crop_or_pad_image_with_boundingbox(image, patch_boundingbox)
plt.imshow(patch)
plt.show()
input("Press <Enter> to finish...")
| 8,487 | 34.514644 | 148 | py |
mapalignment | mapalignment-master/projects/utils/print_utils.py | class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
DEBUG = '\033[31;40m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def print_info(string):
print(bcolors.OKBLUE + string + bcolors.ENDC)
def print_success(string):
print(bcolors.OKGREEN + string + bcolors.ENDC)
def print_failure(string):
print(bcolors.FAIL + string + bcolors.ENDC)
def print_error(string):
print_failure(string)
def print_warning(string):
print(bcolors.WARNING + string + bcolors.ENDC)
def print_debug(string):
print(bcolors.DEBUG + string + bcolors.ENDC)
def print_format_table():
"""
prints table of formatted text format options
"""
for style in range(8):
for fg in range(30, 38):
s1 = ''
for bg in range(40, 48):
format = ';'.join([str(style), str(fg), str(bg)])
s1 += '\x1b[%sm %s \x1b[0m' % (format, format)
print(s1)
print('\n')
def main():
print_format_table()
print_info("Info")
print_success("Success")
print_failure("Failure")
print_error("ERROR")
print_warning("WARNING")
print_debug("Debug")
if __name__ == '__main__':
main()
| 1,293 | 19.21875 | 65 | py |
mapalignment | mapalignment-master/projects/utils/polygon_utils.py | import math
import random
import numpy as np
import scipy.spatial
from PIL import Image, ImageDraw, ImageFilter
import skimage
import python_utils
if python_utils.module_exists("skimage.measure"):
from skimage.measure import approximate_polygon
if python_utils.module_exists("shapely"):
from shapely import geometry
def is_polygon_clockwise(polygon):
rolled_polygon = np.roll(polygon, shift=1, axis=0)
double_signed_area = np.sum((rolled_polygon[:, 0] - polygon[:, 0]) * (rolled_polygon[:, 1] + polygon[:, 1]))
if 0 < double_signed_area:
return True
else:
return False
def orient_polygon(polygon, orientation="CW"):
poly_is_orientated_cw = is_polygon_clockwise(polygon)
if (poly_is_orientated_cw and orientation == "CCW") or (not poly_is_orientated_cw and orientation == "CW"):
return np.flip(polygon, axis=0)
else:
return polygon
def orient_polygons(polygons, orientation="CW"):
return [orient_polygon(polygon, orientation=orientation) for polygon in polygons]
def raster_to_polygon(image, vertex_count):
contours = skimage.measure.find_contours(image, 0.5)
contour = np.empty_like(contours[0])
contour[:, 0] = contours[0][:, 1]
contour[:, 1] = contours[0][:, 0]
# Simplify until vertex_count
tolerance = 0.1
tolerance_step = 0.1
simplified_contour = contour
while 1 + vertex_count < len(simplified_contour):
simplified_contour = approximate_polygon(contour, tolerance=tolerance)
tolerance += tolerance_step
simplified_contour = simplified_contour[:-1]
# plt.imshow(image, cmap="gray")
# plot_polygon(simplified_contour, draw_labels=False)
# plt.show()
return simplified_contour
def l2diffs(polygon1, polygon2):
"""
Computes vertex-wise L2 difference between the two polygons.
As the two polygons may not have the same starting vertex,
    all shifts are considered and the shift resulting in the minimum mean L2 difference is chosen
:param polygon1:
:param polygon2:
:return:
"""
# Make polygons of equal length
if len(polygon1) != len(polygon2):
while len(polygon1) < len(polygon2):
polygon1 = np.append(polygon1, [polygon1[-1, :]], axis=0)
while len(polygon2) < len(polygon1):
polygon2 = np.append(polygon2, [polygon2[-1, :]], axis=0)
vertex_count = len(polygon1)
    def naive_l2diffs(polygon1, polygon2):
        # Per-vertex Euclidean distance between corresponding vertices
        naive_l2diffs_result = np.sqrt(np.sum(np.power(polygon1 - polygon2, 2), axis=1))
        return naive_l2diffs_result
min_l2_diffs = naive_l2diffs(polygon1, polygon2)
min_mean_l2_diffs = np.mean(min_l2_diffs, axis=0)
for i in range(1, vertex_count):
current_naive_l2diffs = naive_l2diffs(np.roll(polygon1, shift=i, axis=0), polygon2)
current_naive_mean_l2diffs = np.mean(current_naive_l2diffs, axis=0)
if current_naive_mean_l2diffs < min_mean_l2_diffs:
min_l2_diffs = current_naive_l2diffs
min_mean_l2_diffs = current_naive_mean_l2diffs
return min_l2_diffs
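# Minimal sketch (assumed example, not from the original tests): the same square given
# with a different starting vertex should yield zero difference for every vertex, since
# l2diffs searches over all cyclic shifts of the first polygon.
def _l2diffs_example():
    square = np.array([[0, 0], [0, 1], [1, 1], [1, 0]], dtype=np.float64)
    shifted_square = np.roll(square, shift=2, axis=0)
    return l2diffs(square, shifted_square)  # expected: array of zeros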
def check_intersection_with_polygon(input_polygon, target_polygon):
poly1 = geometry.Polygon(input_polygon).buffer(0)
poly2 = geometry.Polygon(target_polygon).buffer(0)
intersection_poly = poly1.intersection(poly2)
intersection_area = intersection_poly.area
is_intersection = 0 < intersection_area
return is_intersection
def check_intersection_with_polygons(input_polygon, target_polygons):
"""
Returns True if there is an intersection with at least one polygon in target_polygons
:param input_polygon:
:param target_polygons:
:return:
"""
for target_polygon in target_polygons:
if check_intersection_with_polygon(input_polygon, target_polygon):
return True
return False
def polygon_area(polygon):
poly = geometry.Polygon(polygon).buffer(0)
return poly.area
def polygon_union(polygon1, polygon2):
poly1 = geometry.Polygon(polygon1).buffer(0)
poly2 = geometry.Polygon(polygon2).buffer(0)
union_poly = poly1.union(poly2)
return np.array(union_poly.exterior.coords)
def polygon_iou(polygon1, polygon2):
poly1 = geometry.Polygon(polygon1).buffer(0)
poly2 = geometry.Polygon(polygon2).buffer(0)
intersection_poly = poly1.intersection(poly2)
union_poly = poly1.union(poly2)
intersection_area = intersection_poly.area
union_area = union_poly.area
if union_area:
iou = intersection_area / union_area
else:
iou = 0
return iou
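# Small worked example (assumed, requires shapely): two unit squares overlapping by
# half share an intersection of 0.5 and a union of 1.5, so the IoU is 1/3.
def _polygon_iou_example():
    square1 = np.array([[0, 0], [0, 1], [1, 1], [1, 0]], dtype=np.float64)
    square2 = square1 + np.array([0.5, 0])
    return polygon_iou(square1, square2)  # approximately 0.333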
def generate_polygon(cx, cy, ave_radius, irregularity, spikeyness, vertex_count):
"""
Start with the centre of the polygon at cx, cy,
then creates the polygon by sampling points on a circle around the centre.
Random noise is added by varying the angular spacing between sequential points,
and by varying the radial distance of each point from the centre.
Params:
cx, cy - coordinates of the "centre" of the polygon
ave_radius - in px, the average radius of this polygon, this roughly controls how large the polygon is,
really only useful for order of magnitude.
irregularity - [0,1] indicating how much variance there is in the angular spacing of vertices. [0,1] will map to
[0, 2 * pi / vertex_count]
spikeyness - [0,1] indicating how much variance there is in each vertex from the circle of radius ave_radius.
[0,1] will map to [0, ave_radius]
vertex_count - self-explanatory
Returns a list of vertices, in CCW order.
"""
irregularity = clip(irregularity, 0, 1) * 2 * math.pi / vertex_count
spikeyness = clip(spikeyness, 0, 1) * ave_radius
# generate n angle steps
angle_steps = []
lower = (2 * math.pi / vertex_count) - irregularity
upper = (2 * math.pi / vertex_count) + irregularity
angle_sum = 0
for i in range(vertex_count):
tmp = random.uniform(lower, upper)
angle_steps.append(tmp)
angle_sum = angle_sum + tmp
# normalize the steps so that point 0 and point n+1 are the same
k = angle_sum / (2 * math.pi)
for i in range(vertex_count):
angle_steps[i] = angle_steps[i] / k
# now generate the points
points = []
angle = random.uniform(0, 2 * math.pi)
for i in range(vertex_count):
r_i = clip(random.gauss(ave_radius, spikeyness), 0, 2 * ave_radius)
x = cx + r_i * math.cos(angle)
y = cy + r_i * math.sin(angle)
points.append((x, y))
angle = angle + angle_steps[i]
return points
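# Hedged example (parameter values are illustrative assumptions): sample a rough
# 8-vertex polygon centred at (100, 100) with an average radius of 50 px.
def _generate_polygon_example():
    vertices = generate_polygon(cx=100, cy=100, ave_radius=50, irregularity=0.3, spikeyness=0.2, vertex_count=8)
    return np.array(vertices)  # shape (8, 2)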
def clip(x, mini, maxi):
if mini > maxi:
return x
elif x < mini:
return mini
elif x > maxi:
return maxi
else:
return x
def scale_bounding_box(bounding_box, scale):
half_width = math.ceil((bounding_box[2] - bounding_box[0]) * scale / 2)
half_height = math.ceil((bounding_box[3] - bounding_box[1]) * scale / 2)
center = [round((bounding_box[0] + bounding_box[2]) / 2), round((bounding_box[1] + bounding_box[3]) / 2)]
scaled_bounding_box = [int(center[0] - half_width), int(center[1] - half_height), int(center[0] + half_width),
int(center[1] + half_height)]
return scaled_bounding_box
def pad_bounding_box(bbox, pad):
return [bbox[0] + pad, bbox[1] + pad, bbox[2] - pad, bbox[3] - pad]
def compute_bounding_box(polygon, scale=1, boundingbox_margin=0, fit=None):
# Compute base bounding box
bounding_box = [np.min(polygon[:, 0]), np.min(polygon[:, 1]), np.max(polygon[:, 0]), np.max(polygon[:, 1])]
# Scale
half_width = math.ceil((bounding_box[2] - bounding_box[0]) * scale / 2)
half_height = math.ceil((bounding_box[3] - bounding_box[1]) * scale / 2)
# Add margin
half_width += boundingbox_margin
half_height += boundingbox_margin
# Compute square bounding box
if fit == "square":
half_width = half_height = max(half_width, half_height)
center = [round((bounding_box[0] + bounding_box[2]) / 2), round((bounding_box[1] + bounding_box[3]) / 2)]
bounding_box = [int(center[0] - half_width), int(center[1] - half_height), int(center[0] + half_width),
int(center[1] + half_height)]
return bounding_box
def compute_patch(polygon, patch_size):
centroid = np.mean(polygon, axis=0)
half_height = half_width = patch_size / 2
bounding_box = [math.ceil(centroid[0] - half_width), math.ceil(centroid[1] - half_height),
math.ceil(centroid[0] + half_width), math.ceil(centroid[1] + half_height)]
return bounding_box
def bounding_box_within_bounds(bounding_box, bounds):
return bounds[0] <= bounding_box[0] and bounds[1] <= bounding_box[1] and bounding_box[2] <= bounds[2] and \
bounding_box[3] <= bounds[3]
def vertex_within_bounds(vertex, bounds):
return bounds[0] <= vertex[0] <= bounds[2] and \
bounds[1] <= vertex[1] <= bounds[3]
def edge_within_bounds(edge, bounds):
return vertex_within_bounds(edge[0], bounds) and vertex_within_bounds(edge[1], bounds)
def bounding_box_area(bounding_box):
return (bounding_box[2] - bounding_box[0]) * (bounding_box[3] - bounding_box[1])
def convert_to_image_patch_space(polygon_image_space, bounding_box):
polygon_image_patch_space = np.empty_like(polygon_image_space)
polygon_image_patch_space[:, 0] = polygon_image_space[:, 0] - bounding_box[0]
polygon_image_patch_space[:, 1] = polygon_image_space[:, 1] - bounding_box[1]
return polygon_image_patch_space
def strip_redundant_vertex(vertices, epsilon=1):
assert len(vertices.shape) == 2 # Is a polygon
new_vertices = vertices
if 1 < vertices.shape[0]:
if np.sum(np.absolute(vertices[0, :] - vertices[-1, :])) < epsilon:
new_vertices = vertices[:-1, :]
return new_vertices
def remove_doubles(vertices, epsilon=0.1):
dists = np.linalg.norm(np.roll(vertices, -1, axis=0) - vertices, axis=-1)
new_vertices = vertices[epsilon < dists]
return new_vertices
def simplify_polygon(polygon, tolerance=1):
approx_polygon = approximate_polygon(polygon, tolerance=tolerance)
return approx_polygon
def simplify_polygons(polygons, tolerance=1):
approx_polygons = []
for polygon in polygons:
approx_polygon = approximate_polygon(polygon, tolerance=tolerance)
approx_polygons.append(approx_polygon)
return approx_polygons
def pad_polygon(vertices, target_length):
assert len(vertices.shape) == 2 # Is a polygon
assert vertices.shape[0] <= target_length
padding_length = target_length - vertices.shape[0]
padding = np.tile(vertices[-1], [padding_length, 1])
padded_vertices = np.append(vertices, padding, axis=0)
return padded_vertices
def compute_diameter(polygon):
dist = scipy.spatial.distance.cdist(polygon, polygon)
return dist.max()
def plot_polygon(polygon, color=None, draw_labels=True, label_direction=1, indexing="xy", axis=None):
if python_utils.module_exists("matplotlib.pyplot"):
import matplotlib.pyplot as plt
if axis is None:
axis = plt.gca()
polygon_closed = np.append(polygon, [polygon[0, :]], axis=0)
if indexing == "xy=":
axis.plot(polygon_closed[:, 0], polygon_closed[:, 1], color=color, linewidth=3.0)
elif indexing == "ij":
axis.plot(polygon_closed[:, 1], polygon_closed[:, 0], color=color, linewidth=3.0)
else:
print("WARNING: Invalid indexing argument")
if draw_labels:
labels = range(1, polygon.shape[0] + 1)
for label, x, y in zip(labels, polygon[:, 0], polygon[:, 1]):
axis.annotate(
label,
xy=(x, y), xytext=(-20 * label_direction, 20 * label_direction),
textcoords='offset points', ha='right', va='bottom',
bbox=dict(boxstyle='round,pad=0.25', fc=color, alpha=0.75),
arrowprops=dict(arrowstyle='->', color=color, connectionstyle='arc3,rad=0'))
def plot_polygons(polygons, color=None, draw_labels=True, label_direction=1, indexing="xy", axis=None):
for polygon in polygons:
plot_polygon(polygon, color=color, draw_labels=draw_labels, label_direction=label_direction, indexing=indexing,
axis=axis)
def compute_edge_normal(edge):
normal = np.array([- (edge[1][1] - edge[0][1]),
edge[1][0] - edge[0][0]])
normal_norm = np.sqrt(np.sum(np.square(normal)))
normal /= normal_norm
return normal
def compute_vector_angle(x, y):
if x < 0.0:
slope = y / x
angle = np.pi + np.arctan(slope)
elif 0.0 < x:
slope = y / x
angle = np.arctan(slope)
else:
if 0 < y:
angle = np.pi / 2
else:
angle = 3 * np.pi / 2
if angle < 0.0:
angle += 2 * np.pi
return angle
def compute_edge_normal_angle_edge(edge):
normal = compute_edge_normal(edge)
normal_x = normal[1]
normal_y = normal[0]
angle = compute_vector_angle(normal_x, normal_y)
return angle
def polygon_in_bounding_box(polygon, bounding_box):
"""
Returns True if all vertices of polygons are inside bounding_box
:param polygon: [N, 2]
:param bounding_box: [row_min, col_min, row_max, col_max]
:return:
"""
result = np.all(
np.logical_and(
np.logical_and(bounding_box[0] <= polygon[:, 0], polygon[:, 0] <= bounding_box[2]),
np.logical_and(bounding_box[1] <= polygon[:, 1], polygon[:, 1] <= bounding_box[3])
)
)
return result
def filter_polygons_in_bounding_box(polygons, bounding_box):
"""
Only keep polygons that are fully inside bounding_box
:param polygons: [shape(N, 2), ...]
:param bounding_box: [row_min, col_min, row_max, col_max]
:return:
"""
filtered_polygons = []
for polygon in polygons:
if polygon_in_bounding_box(polygon, bounding_box):
filtered_polygons.append(polygon)
return filtered_polygons
def transform_polygon_to_bounding_box_space(polygon, bounding_box):
"""
:param polygon: shape(N, 2)
:param bounding_box: [row_min, col_min, row_max, col_max]
:return:
"""
assert len(polygon.shape) and polygon.shape[1] == 2, "polygon should have shape (N, 2), not shape {}".format(
polygon.shape)
assert len(bounding_box) == 4, "bounding_box should have 4 elements: [row_min, col_min, row_max, col_max]"
transformed_polygon = polygon.copy()
transformed_polygon[:, 0] -= bounding_box[0]
transformed_polygon[:, 1] -= bounding_box[1]
return transformed_polygon
def transform_polygons_to_bounding_box_space(polygons, bounding_box):
transformed_polygons = []
for polygon in polygons:
transformed_polygons.append(transform_polygon_to_bounding_box_space(polygon, bounding_box))
return transformed_polygons
def crop_polygon_to_patch(polygon, bounding_box):
return transform_polygon_to_bounding_box_space(polygon, bounding_box)
def crop_polygon_to_patch_if_touch(polygon, bounding_box):
# Verify that at least one vertex is inside bounding_box
polygon_touches_patch = np.any(
np.logical_and(
np.logical_and(bounding_box[0] <= polygon[:, 0], polygon[:, 0] <= bounding_box[2]),
np.logical_and(bounding_box[1] <= polygon[:, 1], polygon[:, 1] <= bounding_box[3])
)
)
if polygon_touches_patch:
return crop_polygon_to_patch(polygon, bounding_box)
else:
return None
def crop_polygons_to_patch_if_touch(polygons, bounding_box, return_indices=False):
if return_indices:
indices = []
cropped_polygons = []
for i, polygon in enumerate(polygons):
cropped_polygon = crop_polygon_to_patch_if_touch(polygon, bounding_box)
if cropped_polygon is not None:
cropped_polygons.append(cropped_polygon)
if return_indices:
indices.append(i)
if return_indices:
return cropped_polygons, indices
else:
return cropped_polygons
def crop_polygons_to_patch(polygons, bounding_box):
cropped_polygons = []
for polygon in polygons:
cropped_polygon = crop_polygon_to_patch(polygon, bounding_box)
if cropped_polygon is not None:
cropped_polygons.append(cropped_polygon)
return cropped_polygons
def polygon_remove_holes(polygon):
polygon_no_holes = []
for coords in polygon:
if not np.isnan(coords[0]) and not np.isnan(coords[1]):
polygon_no_holes.append(coords)
else:
break
return np.array(polygon_no_holes)
def polygons_remove_holes(polygons):
gt_polygons_no_holes = []
for polygon in polygons:
gt_polygons_no_holes.append(polygon_remove_holes(polygon))
return gt_polygons_no_holes
def apply_batch_disp_map_to_polygons(pred_disp_field_map_batch, disp_polygons_batch):
"""
:param pred_disp_field_map_batch: shape(batch_size, height, width, 2)
:param disp_polygons_batch: shape(batch_size, polygon_count, vertex_count, 2)
:return:
"""
# Apply all displacements at once
batch_count = pred_disp_field_map_batch.shape[0]
row_count = pred_disp_field_map_batch.shape[1]
col_count = pred_disp_field_map_batch.shape[2]
disp_polygons_batch_int = np.round(disp_polygons_batch).astype(np.int)
# Clip coordinates to the field map:
disp_polygons_batch_int_nearest_valid_field = np.maximum(0, disp_polygons_batch_int)
disp_polygons_batch_int_nearest_valid_field[:, :, :, 0] = np.minimum(
disp_polygons_batch_int_nearest_valid_field[:, :, :, 0], row_count - 1)
disp_polygons_batch_int_nearest_valid_field[:, :, :, 1] = np.minimum(
disp_polygons_batch_int_nearest_valid_field[:, :, :, 1], col_count - 1)
aligned_disp_polygons_batch = disp_polygons_batch.copy()
for batch_index in range(batch_count):
mask = ~np.isnan(disp_polygons_batch[batch_index, :, :, 0]) # Checking one coordinate is enough
aligned_disp_polygons_batch[batch_index, mask, 0] += pred_disp_field_map_batch[batch_index,
disp_polygons_batch_int_nearest_valid_field[
batch_index, mask, 0],
disp_polygons_batch_int_nearest_valid_field[
batch_index, mask, 1], 0].flatten()
aligned_disp_polygons_batch[batch_index, mask, 1] += pred_disp_field_map_batch[batch_index,
disp_polygons_batch_int_nearest_valid_field[
batch_index, mask, 0],
disp_polygons_batch_int_nearest_valid_field[
batch_index, mask, 1], 1].flatten()
return aligned_disp_polygons_batch
def apply_disp_map_to_polygons(disp_field_map, polygons):
"""
:param disp_field_map: shape(height, width, 2)
:param polygon_list: [shape(N, 2), shape(M, 2), ...]
:return:
"""
disp_field_map_batch = np.expand_dims(disp_field_map, axis=0)
disp_polygons = []
for polygon in polygons:
polygon_batch = np.expand_dims(np.expand_dims(polygon, axis=0), axis=0) # Add batch and polygon_count dims
disp_polygon_batch = apply_batch_disp_map_to_polygons(disp_field_map_batch, polygon_batch)
disp_polygon_batch = disp_polygon_batch[0, 0] # Remove batch and polygon_count dims
disp_polygons.append(disp_polygon_batch)
return disp_polygons
# This next function is somewhat redundant with apply_disp_map_to_polygons... (but displaces in the opposite direction)
def apply_displacement_field_to_polygons(polygons, disp_field_map):
disp_polygons = []
for polygon in polygons:
mask_nans = np.isnan(polygon) # Will be necessary when polygons with holes are handled
polygon_int = np.round(polygon).astype(np.int)
polygon_int_clipped = np.maximum(0, polygon_int)
polygon_int_clipped[:, 0] = np.minimum(disp_field_map.shape[0] - 1, polygon_int_clipped[:, 0])
polygon_int_clipped[:, 1] = np.minimum(disp_field_map.shape[1] - 1, polygon_int_clipped[:, 1])
disp_polygon = polygon.copy()
disp_polygon[~mask_nans[:, 0], 0] -= disp_field_map[polygon_int_clipped[~mask_nans[:, 0], 0],
polygon_int_clipped[~mask_nans[:, 0], 1], 0]
disp_polygon[~mask_nans[:, 1], 1] -= disp_field_map[polygon_int_clipped[~mask_nans[:, 1], 0],
polygon_int_clipped[~mask_nans[:, 1], 1], 1]
disp_polygons.append(disp_polygon)
return disp_polygons
def apply_displacement_fields_to_polygons(polygons, disp_field_maps):
disp_field_map_count = disp_field_maps.shape[0]
disp_polygons_list = []
for i in range(disp_field_map_count):
disp_polygons = apply_displacement_field_to_polygons(polygons, disp_field_maps[i, :, :, :])
disp_polygons_list.append(disp_polygons)
return disp_polygons_list
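# Hedged sketch (assumed shapes): displace one square polygon with a constant
# displacement field of (2, 3) pixels. apply_displacement_field_to_polygons subtracts
# the field value sampled at each (rounded) vertex, so every vertex moves by (-2, -3).
def _apply_displacement_field_example():
    polygon = np.array([[10.0, 10.0], [10.0, 20.0], [20.0, 20.0], [20.0, 10.0]])
    disp_field_map = np.empty((32, 32, 2))
    disp_field_map[:, :, 0] = 2.0
    disp_field_map[:, :, 1] = 3.0
    return apply_displacement_field_to_polygons([polygon], disp_field_map)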
def draw_line(shape, line, width, blur_radius=0):
im = Image.new("L", (shape[1], shape[0]))
# im_px_access = im.load()
draw = ImageDraw.Draw(im)
vertex_list = []
for coords in line:
vertex = (coords[1], coords[0])
vertex_list.append(vertex)
draw.line(vertex_list, fill=255, width=width)
if 0 < blur_radius:
im = im.filter(ImageFilter.GaussianBlur(radius=blur_radius))
array = np.array(im) / 255
return array
def draw_triangle(shape, triangle, blur_radius=0):
im = Image.new("L", (shape[1], shape[0]))
# im_px_access = im.load()
draw = ImageDraw.Draw(im)
vertex_list = []
for coords in triangle:
vertex = (coords[1], coords[0])
vertex_list.append(vertex)
draw.polygon(vertex_list, fill=255)
if 0 < blur_radius:
im = im.filter(ImageFilter.GaussianBlur(radius=blur_radius))
array = np.array(im) / 255
return array
def draw_polygon(polygon, shape, fill=True, edges=True, vertices=True, line_width=3):
# TODO: handle holes in polygons
im = Image.new("RGB", (shape[1], shape[0]))
im_px_access = im.load()
draw = ImageDraw.Draw(im)
vertex_list = []
for coords in polygon:
vertex = (coords[1], coords[0])
if not np.isnan(vertex[0]) and not np.isnan(vertex[1]):
vertex_list.append(vertex)
else:
break
if edges:
draw.line(vertex_list, fill=(0, 255, 0), width=line_width)
if fill:
draw.polygon(vertex_list, fill=(255, 0, 0))
if vertices:
draw.point(vertex_list, fill=(0, 0, 255))
# Convert image to numpy array with the right number of channels
array = np.array(im)
selection = [fill, edges, vertices]
selected_array = array[:, :, selection]
return selected_array
def draw_polygons(polygons, shape, fill=True, edges=True, vertices=True, line_width=3):
# TODO: handle holes in polygons
# Channels
fill_channel_index = 0 # Always first channel
edges_channel_index = fill # If fill == True, take second channel. If not then take first
vertices_channel_index = fill + edges # Same principle as above
channel_count = fill + edges + vertices
im_draw_list = []
for channel_index in range(channel_count):
im = Image.new("L", (shape[1], shape[0]))
im_px_access = im.load()
draw = ImageDraw.Draw(im)
im_draw_list.append((im, draw))
for polygon in polygons:
vertex_list = []
for coords in polygon:
vertex = (coords[1], coords[0])
if not np.isnan(vertex[0]) and not np.isnan(vertex[1]):
vertex_list.append(vertex)
else:
break
if fill:
draw = im_draw_list[fill_channel_index][1]
draw.polygon(vertex_list, fill=255)
if edges:
draw = im_draw_list[edges_channel_index][1]
draw.line(vertex_list, fill=255, width=line_width)
if vertices:
draw = im_draw_list[vertices_channel_index][1]
draw.point(vertex_list, fill=255)
# Convert image to numpy array with the right number of channels
array_list = [np.array(im_draw[0]) for im_draw in im_draw_list]
array = np.stack(array_list, axis=-1)
return array
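# Illustrative sketch (assumed coordinates): rasterize two polygons into a
# (256, 256, 3) map whose channels are fill, edges and vertices respectively.
def _draw_polygons_example():
    triangle = np.array([[10, 10], [10, 100], [100, 50]])
    square = np.array([[120, 120], [120, 200], [200, 200], [200, 120]])
    return draw_polygons([triangle, square], (256, 256), fill=True, edges=True, vertices=True)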
def draw_polygon_map(polygons, shape, fill=True, edges=True, vertices=True, line_width=3):
"""
Alias for draw_polygon function
:param polygons:
:param shape:
:param fill:
:param edges:
:param vertices:
:param line_width:
:return:
"""
return draw_polygons(polygons, shape, fill=fill, edges=edges, vertices=vertices, line_width=line_width)
def draw_polygon_maps(polygons_list, shape, fill=True, edges=True, vertices=True, line_width=3):
polygon_maps_list = []
for polygons in polygons_list:
polygon_map = draw_polygon_map(polygons, shape, fill=fill, edges=edges, vertices=vertices,
line_width=line_width)
polygon_maps_list.append(polygon_map)
disp_field_maps = np.stack(polygon_maps_list, axis=0)
return disp_field_maps
def swap_coords(polygon):
polygon_new = polygon.copy()
polygon_new[..., 0] = polygon[..., 1]
polygon_new[..., 1] = polygon[..., 0]
return polygon_new
def prepare_polygons_for_tfrecord(gt_polygons, disp_polygons_list, boundingbox=None):
assert len(gt_polygons)
# print("Starting to crop polygons")
# start = time.time()
dtype = gt_polygons[0].dtype
cropped_gt_polygons = []
cropped_disp_polygons_list = [[] for i in range(len(disp_polygons_list))]
polygon_length = 0
for polygon_index, gt_polygon in enumerate(gt_polygons):
if boundingbox is not None:
cropped_gt_polygon = crop_polygon_to_patch_if_touch(gt_polygon, boundingbox)
else:
cropped_gt_polygon = gt_polygon
if cropped_gt_polygon is not None:
cropped_gt_polygons.append(cropped_gt_polygon)
if polygon_length < cropped_gt_polygon.shape[0]:
polygon_length = cropped_gt_polygon.shape[0]
# Crop disp polygons
for disp_index, disp_polygons in enumerate(disp_polygons_list):
disp_polygon = disp_polygons[polygon_index]
if boundingbox is not None:
cropped_disp_polygon = crop_polygon_to_patch(disp_polygon, boundingbox)
else:
cropped_disp_polygon = disp_polygon
cropped_disp_polygons_list[disp_index].append(cropped_disp_polygon)
# end = time.time()
# print("Finished cropping polygons in in {}s".format(end - start))
#
# print("Starting to pad polygons")
# start = time.time()
polygon_count = len(cropped_gt_polygons)
if polygon_count:
# Add +1 to both dimensions for end-of-item NaNs
padded_gt_polygons = np.empty((polygon_count + 1, polygon_length + 1, 2), dtype=dtype)
padded_gt_polygons[:, :, :] = np.nan
padded_disp_polygons_array = np.empty((len(disp_polygons_list), polygon_count + 1, polygon_length + 1, 2),
dtype=dtype)
padded_disp_polygons_array[:, :, :] = np.nan
for i, polygon in enumerate(cropped_gt_polygons):
padded_gt_polygons[i, 0:polygon.shape[0], :] = polygon
for j, polygons in enumerate(cropped_disp_polygons_list):
for i, polygon in enumerate(polygons):
padded_disp_polygons_array[j, i, 0:polygon.shape[0], :] = polygon
else:
padded_gt_polygons = padded_disp_polygons_array = None
# end = time.time()
# print("Finished padding polygons in in {}s".format(end - start))
return padded_gt_polygons, padded_disp_polygons_array
def prepare_stages_polygons_for_tfrecord(gt_polygons, disp_polygons_list_list, boundingbox):
assert len(gt_polygons)
# print("Starting to crop polygons")
# start = time.time()
dtype = gt_polygons[0].dtype
cropped_gt_polygons = []
cropped_disp_polygons_list_list = [[[] for i in range(len(disp_polygons_list))] for disp_polygons_list in
disp_polygons_list_list]
polygon_length = 0
for polygon_index, gt_polygon in enumerate(gt_polygons):
cropped_gt_polygon = crop_polygon_to_patch_if_touch(gt_polygon, boundingbox)
if cropped_gt_polygon is not None:
cropped_gt_polygons.append(cropped_gt_polygon)
if polygon_length < cropped_gt_polygon.shape[0]:
polygon_length = cropped_gt_polygon.shape[0]
# Crop disp polygons
for stage_index, disp_polygons_list in enumerate(disp_polygons_list_list):
for disp_index, disp_polygons in enumerate(disp_polygons_list):
disp_polygon = disp_polygons[polygon_index]
cropped_disp_polygon = crop_polygon_to_patch(disp_polygon, boundingbox)
cropped_disp_polygons_list_list[stage_index][disp_index].append(cropped_disp_polygon)
# end = time.time()
# print("Finished cropping polygons in in {}s".format(end - start))
#
# print("Starting to pad polygons")
# start = time.time()
polygon_count = len(cropped_gt_polygons)
if polygon_count:
# Add +1 to both dimensions for end-of-item NaNs
padded_gt_polygons = np.empty((polygon_count + 1, polygon_length + 1, 2), dtype=dtype)
padded_gt_polygons[:, :, :] = np.nan
padded_disp_polygons_array = np.empty(
(len(disp_polygons_list_list), len(disp_polygons_list_list[0]), polygon_count + 1, polygon_length + 1, 2),
dtype=dtype)
padded_disp_polygons_array[:, :, :] = np.nan
for i, polygon in enumerate(cropped_gt_polygons):
padded_gt_polygons[i, 0:polygon.shape[0], :] = polygon
for k, cropped_disp_polygons_list in enumerate(cropped_disp_polygons_list_list):
for j, polygons in enumerate(cropped_disp_polygons_list):
for i, polygon in enumerate(polygons):
padded_disp_polygons_array[k, j, i, 0:polygon.shape[0], :] = polygon
else:
padded_gt_polygons = padded_disp_polygons_array = None
# end = time.time()
# print("Finished padding polygons in in {}s".format(end - start))
return padded_gt_polygons, padded_disp_polygons_array
def rescale_polygon(polygons, scaling_factor):
"""
:param polygons:
:return: scaling_factor
"""
if len(polygons):
rescaled_polygons = [polygon * scaling_factor for polygon in polygons]
return rescaled_polygons
else:
return polygons
def get_edge_center(edge):
return np.mean(edge, axis=0)
def get_edge_length(edge):
return np.sqrt(np.sum(np.square(edge[0] - edge[1])))
def get_edges_angle(edge1, edge2):
x1 = edge1[1, 0] - edge1[0, 0]
y1 = edge1[1, 1] - edge1[0, 1]
x2 = edge2[1, 0] - edge2[0, 0]
y2 = edge2[1, 1] - edge2[0, 1]
angle1 = compute_vector_angle(x1, y1)
angle2 = compute_vector_angle(x2, y2)
edges_angle = math.fabs(angle1 - angle2) % (2 * math.pi)
if math.pi < edges_angle:
edges_angle = 2 * math.pi - edges_angle
return edges_angle
def compute_angle_two_points(point_source, point_target):
vector = point_target - point_source
angle = compute_vector_angle(vector[0], vector[1])
return angle
def compute_angle_three_points(point_source, point_target1, point_target2):
squared_dist_source_target1 = math.pow((point_source[0] - point_target1[0]), 2) + math.pow(
(point_source[1] - point_target1[1]), 2)
squared_dist_source_target2 = math.pow((point_source[0] - point_target2[0]), 2) + math.pow(
(point_source[1] - point_target2[1]), 2)
squared_dist_target1_target2 = math.pow((point_target1[0] - point_target2[0]), 2) + math.pow(
(point_target1[1] - point_target2[1]), 2)
dist_source_target1 = math.sqrt(squared_dist_source_target1)
dist_source_target2 = math.sqrt(squared_dist_source_target2)
try:
cos = (squared_dist_source_target1 + squared_dist_source_target2 - squared_dist_target1_target2) / (
2 * dist_source_target1 * dist_source_target2)
except ZeroDivisionError:
return float('inf')
cos = max(min(cos, 1),
-1) # Avoid some math domain error due to cos being slightly bigger than 1 (from floating point operations)
angle = math.acos(cos)
return angle
def are_edges_overlapping(edge1, edge2, threshold):
"""
Checks if at least 2 different vertices of either edge lies on the other edge: it characterizes an overlap
:param edge1:
:param edge2:
:param threshold:
:return:
"""
count_list = [
is_vertex_on_edge(edge1[0], edge2, threshold),
is_vertex_on_edge(edge1[1], edge2, threshold),
is_vertex_on_edge(edge2[0], edge1, threshold),
is_vertex_on_edge(edge2[1], edge1, threshold),
]
# Count number of identical vertices
identical_vertex_list = [
np.array_equal(edge1[0], edge2[0]),
np.array_equal(edge1[0], edge2[1]),
np.array_equal(edge1[1], edge2[0]),
np.array_equal(edge1[1], edge2[1]),
]
adjusted_count = np.sum(count_list) - np.sum(identical_vertex_list)
return 2 <= adjusted_count
# def are_edges_collinear(edge1, edge2, angle_threshold):
# edges_angle = get_edges_angle(edge1, edge2)
# return edges_angle < angle_threshold
def get_line_intersect(a1, a2, b1, b2):
"""
Returns the point of intersection of the lines passing through a2,a1 and b2,b1.
a1: [x, y] a point on the first line
a2: [x, y] another point on the first line
b1: [x, y] a point on the second line
b2: [x, y] another point on the second line
"""
s = np.vstack([a1, a2, b1, b2]) # s for stacked
h = np.hstack((s, np.ones((4, 1)))) # h for homogeneous
l1 = np.cross(h[0], h[1]) # get first line
l2 = np.cross(h[2], h[3]) # get second line
x, y, z = np.cross(l1, l2) # point of intersection
if z == 0: # lines are parallel
return float('inf'), float('inf')
return x / z, y / z
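# Quick sanity example (assumed, not from the original tests): the line through
# (0, 0) and (1, 1) crosses the line through (0, 1) and (1, 0) at (0.5, 0.5), while
# two parallel horizontal lines return (inf, inf).
def _get_line_intersect_example():
    crossing = get_line_intersect([0, 0], [1, 1], [0, 1], [1, 0])  # (0.5, 0.5)
    parallel = get_line_intersect([0, 0], [1, 0], [0, 1], [1, 1])  # (inf, inf)
    return crossing, parallel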
def are_edges_intersecting(edge1, edge2, epsilon=1e-6):
"""
edge1 and edge2 should not have a common vertex between them
:param edge1:
:param edge2:
:return:
"""
intersect = get_line_intersect(edge1[0], edge1[1], edge2[0], edge2[1])
# print("---")
# print(edge1)
# print(edge2)
# print(intersect)
if intersect[0] == float('inf') or intersect[1] == float('inf'):
# Lines don't intersect
return False
else:
# Lines intersect
# Check if intersect point belongs to both edges
angle1 = compute_angle_three_points(intersect, edge1[0], edge1[1])
angle2 = compute_angle_three_points(intersect, edge2[0], edge2[1])
intersect_belongs_to_edges = (math.pi - epsilon) < angle1 and (math.pi - epsilon) < angle2
return intersect_belongs_to_edges
def shorten_edge(edge, length_to_cut1, length_to_cut2, min_length):
center = get_edge_center(edge)
total_length = get_edge_length(edge)
new_length = total_length - length_to_cut1 - length_to_cut2
if min_length <= new_length:
scale = new_length / total_length
new_edge = (edge.copy() - center) * scale + center
return new_edge
else:
return None
def is_edge_in_triangle(edge, triangle):
return edge[0] in triangle and edge[1] in triangle
def get_connectivity_of_edge(edge, triangles):
connectivity = 0
for triangle in triangles:
connectivity += is_edge_in_triangle(edge, triangle)
return connectivity
def get_connectivity_of_edges(edges, triangles):
connectivity_of_edges = []
for edge in edges:
connectivity_of_edge = get_connectivity_of_edge(edge, triangles)
connectivity_of_edges.append(connectivity_of_edge)
return connectivity_of_edges
def polygon_to_closest_int(polygons):
int_polygons = []
for polygon in polygons:
int_polygon = np.round(polygon)
int_polygons.append(int_polygon)
return int_polygons
def is_vertex_on_edge(vertex, edge, threshold):
"""
:param vertex:
:param edge:
:param threshold:
:return:
"""
# Compare distances sum to edge length
edge_length = get_edge_length(edge)
dist1 = get_edge_length([vertex, edge[0]])
dist2 = get_edge_length([vertex, edge[1]])
vertex_on_edge = (dist1 + dist2) < (edge_length + threshold)
return vertex_on_edge
def get_face_edges(face_vertices):
edges = []
prev_vertex = face_vertices[0]
for vertex in face_vertices[1:]:
edge = (prev_vertex, vertex)
edges.append(edge)
# For next iteration:
prev_vertex = vertex
return edges
def find_edge_in_face(edge, face_vertices):
# Copy inputs list so that we don't modify it
face_vertices = face_vertices[:]
face_vertices.append(face_vertices[0]) # Close face (does not matter if it is already closed)
edges = get_face_edges(face_vertices)
index = edges.index(edge)
return index
def clean_degenerate_face_edges(face_vertices):
def recursive_clean_degenerate_face_edges(open_face_vertices):
face_vertex_count = len(open_face_vertices)
cleaned_open_face_vertices = []
skip = False
for index in range(face_vertex_count):
if skip:
skip = False
else:
prev_vertex = open_face_vertices[(index - 1) % face_vertex_count]
vertex = open_face_vertices[index]
next_vertex = open_face_vertices[(index + 1) % face_vertex_count]
if prev_vertex != next_vertex:
cleaned_open_face_vertices.append(vertex)
else:
skip = True
if len(cleaned_open_face_vertices) < face_vertex_count:
return recursive_clean_degenerate_face_edges(cleaned_open_face_vertices)
else:
return cleaned_open_face_vertices
open_face_vertices = face_vertices[:-1]
cleaned_face_vertices = recursive_clean_degenerate_face_edges(open_face_vertices)
# Close cleaned_face_vertices
cleaned_face_vertices.append(cleaned_face_vertices[0])
return cleaned_face_vertices
def merge_vertices(main_face_vertices, extra_face_vertices, common_edge):
sorted_common_edge = tuple(sorted(common_edge))
open_face_vertices_pair = (main_face_vertices[:-1], extra_face_vertices[:-1])
face_index = 0 # 0: current_face == main_face, 1: current_face == extra_face
vertex_index = 0
start_vertex = vertex = open_face_vertices_pair[face_index][vertex_index]
merged_face_vertices = [start_vertex]
faces_merged = False
while not faces_merged:
# Get next vertex
next_vertex_index = (vertex_index + 1) % len(open_face_vertices_pair[face_index])
next_vertex = open_face_vertices_pair[face_index][next_vertex_index]
edge = (vertex, next_vertex)
sorted_edge = tuple(sorted(edge))
if sorted_edge == sorted_common_edge:
# Switch current face
face_index = 1 - face_index
# Find vertex_index in new current face
reverse_edge = (edge[1], edge[0]) # Because we are now on the other face
edge_index = find_edge_in_face(reverse_edge, open_face_vertices_pair[face_index])
vertex_index = edge_index + 1 # Index of the second vertex of edge
# vertex_index = open_face_vertices_pair[face_index].index(vertex)
vertex_index = (vertex_index + 1) % len(open_face_vertices_pair[face_index])
vertex = open_face_vertices_pair[face_index][vertex_index]
merged_face_vertices.append(vertex)
faces_merged = vertex == start_vertex # This also makes the merged_face closed
# Remove degenerate face edges (edges where the face if on both sides of it)
cleaned_merged_face_vertices = clean_degenerate_face_edges(merged_face_vertices)
return cleaned_merged_face_vertices
if __name__ == "__main__":
# polygon = np.array([
# [0, 0],
# [1, 0],
# [1, 1],
# [np.nan, np.nan],
# [0, 0],
# [1, 0],
# [1, 1],
# [np.nan, np.nan],
# ], dtype=np.float32)
# polygons = [
# polygon.copy(),
# polygon.copy(),
# polygon.copy(),
# polygon.copy() + 100,
# ]
#
# bounding_box = [10, 10, 100, 100] # Top left corner x, y, bottom right corner x, y
#
# cropped_polygons = crop_polygons_to_patch(polygons, bounding_box)
# print(cropped_polygons)
# # --- Check angle functions --- #
# edge1 = np.array([
# [0, 0],
# [1, 0],
# ])
# edge2 = np.array([
# [1, 0],
# [2, 0],
# ])
# edge_radius = 0.1
# edges_overlapping = are_edges_overlapping(edge1, edge2, edge_radius)
# print("edges_overlapping:")
# print(edges_overlapping)
# --- clean_degenerate_face_edges --- #
face_vertices = [215, 238, 220, 201, 193, 194, 195, 199, 213, 219, 235, 238, 215]
# face_vertices = [1, 2, 3, 4, 5, 4, 3, 6, 1]
print(face_vertices)
cleaned_face_vertices = clean_degenerate_face_edges(face_vertices)
print(cleaned_face_vertices)
| 42,079 | 36.437722 | 131 | py |
mapalignment | mapalignment-master/projects/utils/geo_utils.py | import numpy as np
import time
import json
import os.path
from osgeo import gdal, ogr
from osgeo import osr
import overpy
# from fiona.crs import from_epsg
# import fiona
from pyproj import Proj, transform
import polygon_utils
import math_utils
import print_utils
# --- Params --- #
QUERY_BASE = \
"""
<osm-script timeout="900" element-limit="1073741824">
<union>
<query type="way">
<has-kv k="{0}"/>
<bbox-query s="{1}" w="{2}" n="{3}" e="{4}"/>
</query>
<recurse type="way-node" into="nodes"/>
</union>
<print/>
</osm-script>
"""
WGS84_WKT = """
GEOGCS["GCS_WGS_1984",
DATUM["WGS_1984",
SPHEROID["WGS_84",6378137,298.257223563]],
PRIMEM["Greenwich",0],
UNIT["Degree",0.017453292519943295]]
"""
CRS = {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'}
# --- --- #
def get_coor_in_space(image_filepath):
"""
:param image_filepath: Path to geo-referenced tif image
    :return: coor in original space and in WGS84 spatial reference and original geotransform
:return: geo transform (x_min, res, 0, y_max, 0, -res)
:return: [[OR_x_min,OR_y_min,OR_x_max,OR_y_max],[TR_x_min,TR_y_min,TR_x_max,TR_y_max]]
"""
# print(" get_coor_in_space(image_filepath)")
ds = gdal.Open(image_filepath)
width = ds.RasterXSize
height = ds.RasterYSize
gt = ds.GetGeoTransform()
x_min = gt[0]
y_min = gt[3] + width * gt[4] + height * gt[5]
x_max = gt[0] + width * gt[1] + height * gt[2]
y_max = gt[3]
prj = ds.GetProjection()
srs = osr.SpatialReference(wkt=prj)
coor_sys = srs.GetAttrValue("PROJCS|AUTHORITY", 1)
if coor_sys is None:
coor_sys = srs.GetAttrValue("GEOGCS|AUTHORITY", 1)
new_cs = osr.SpatialReference()
new_cs.ImportFromWkt(WGS84_WKT)
# print(srs, new_cs)
transform = osr.CoordinateTransformation(srs, new_cs)
lat_long_min = transform.TransformPoint(x_min, y_min)
lat_long_max = transform.TransformPoint(x_max, y_max)
coor = [[x_min, y_min, x_max, y_max], [lat_long_min[0], lat_long_min[1], lat_long_max[0], lat_long_max[1]]]
return coor, gt, coor_sys
def get_osm_data(coor_query):
"""
    :param coor_query: [x_min, y_min, x_max, y_max]
:return: OSM query result
"""
api = overpy.Overpass()
query_buildings = QUERY_BASE.format("building", coor_query[1], coor_query[0], coor_query[3], coor_query[2])
query_successful = False
wait_duration = 60
result = None
while not query_successful:
try:
result = api.query(query_buildings)
query_successful = True
        except (overpy.exception.OverpassGatewayTimeout, overpy.exception.OverpassTooManyRequests, ConnectionResetError):
print("OSM server overload. Waiting for {} seconds before querying again...".format(wait_duration))
time.sleep(wait_duration)
wait_duration *= 2 # Multiply wait time by 2 for the next time
return result
def proj_to_epsg_space(nodes, coor_sys):
original = Proj(CRS)
destination = Proj(init='EPSG:{}'.format(coor_sys))
polygon = []
for node in nodes:
polygon.append(transform(original, destination, node.lon, node.lat))
return np.array(polygon)
def compute_epsg_to_image_mat(coor, gt):
x_min = coor[0][0]
y_max = coor[0][3]
transform_mat = np.array([
[gt[1], 0, 0],
[0, gt[5], 0],
[x_min, y_max, 1],
])
return np.linalg.inv(transform_mat)
def compute_image_to_epsg_mat(coor, gt):
x_min = coor[0][0]
y_max = coor[0][3]
transform_mat = np.array([
[gt[1], 0, 0],
[0, gt[5], 0],
[x_min, y_max, 1],
])
return transform_mat
def apply_transform_mat(polygon_epsg_space, transform_mat):
polygon_epsg_space_homogeneous = math_utils.to_homogeneous(polygon_epsg_space)
polygon_image_space_homogeneous = np.matmul(polygon_epsg_space_homogeneous, transform_mat)
polygon_image_space = math_utils.to_euclidian(polygon_image_space_homogeneous)
return polygon_image_space
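# Hedged round-trip sketch (illustrative only): since compute_epsg_to_image_mat returns
# the inverse of the matrix returned by compute_image_to_epsg_mat (for the same coor and
# gt from get_coor_in_space), mapping a polygon to image space and back should recover
# the original EPSG-space coordinates up to floating point error.
def _transform_round_trip_example(coor, gt, polygon_epsg_space):
    to_image_mat = compute_epsg_to_image_mat(coor, gt)
    to_epsg_mat = compute_image_to_epsg_mat(coor, gt)
    polygon_image_space = apply_transform_mat(polygon_epsg_space, to_image_mat)
    return apply_transform_mat(polygon_image_space, to_epsg_mat)  # approximately polygon_epsg_space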
def get_polygons_from_osm(image_filepath, tag=""):
coor, gt, coor_system = get_coor_in_space(image_filepath)
transform_mat = compute_epsg_to_image_mat(coor, gt)
osm_data = get_osm_data(coor[1])
polygons = []
for way in osm_data.ways:
if way.tags.get(tag, "n/a") != 'n/a':
# polygon = way.nodes[:-1] # Start and end vertex are the same so remove the end vertex
polygon = way.nodes
polygon_epsg_space = proj_to_epsg_space(polygon, coor_system)
polygon_image_space = apply_transform_mat(polygon_epsg_space, transform_mat)
polygon_image_space = polygon_utils.swap_coords(polygon_image_space)
polygons.append(polygon_image_space)
return polygons
def get_polygons_from_shapefile(image_filepath, input_shapefile_filepath):
coor, gt, coor_system = get_coor_in_space(image_filepath)
transform_mat = compute_epsg_to_image_mat(coor, gt)
file = ogr.Open(input_shapefile_filepath)
assert file is not None, "File {} does not exist!".format(input_shapefile_filepath)
shape = file.GetLayer(0)
feature_count = shape.GetFeatureCount()
polygons = []
properties_list = []
for feature_index in range(feature_count):
feature = shape.GetFeature(feature_index)
raw_json = feature.ExportToJson()
parsed_json = json.loads(raw_json)
# Extract polygon:
polygon = np.array(parsed_json["geometry"]["coordinates"][0])
assert len(polygon.shape) == 2, "polygon should have shape (n, d)"
if 2 < polygon.shape[1]:
print_utils.print_warning("WARNING: polygon from shapefile has shape {}. Will discard extra values to have polygon with shape ({}, 2)".format(polygon.shape, polygon.shape[0]))
polygon = polygon[:, :2]
polygon_epsg_space = polygon
polygon_image_space = apply_transform_mat(polygon_epsg_space, transform_mat)
polygon_image_space = polygon_utils.swap_coords(polygon_image_space)
polygons.append(polygon_image_space)
# Extract properties:
if "properties" in parsed_json:
properties = parsed_json["properties"]
properties_list.append(properties)
if properties_list:
return polygons, properties_list
else:
return polygons
def create_ogr_polygon(polygon, transform_mat):
polygon_swapped_coords = polygon_utils.swap_coords(polygon)
polygon_epsg = apply_transform_mat(polygon_swapped_coords, transform_mat)
ring = ogr.Geometry(ogr.wkbLinearRing)
for coord in polygon_epsg:
ring.AddPoint(coord[0], coord[1])
# Create polygon
poly = ogr.Geometry(ogr.wkbPolygon)
poly.AddGeometry(ring)
return poly.ExportToWkt()
def create_ogr_polygons(polygons, transform_mat):
ogr_polygons = []
for polygon in polygons:
ogr_polygons.append(create_ogr_polygon(polygon, transform_mat))
return ogr_polygons
def save_shapefile_from_polygons(polygons, image_filepath, output_shapefile_filepath, properties_list=None):
"""
https://gis.stackexchange.com/a/52708/8104
"""
if properties_list is not None:
assert len(polygons) == len(properties_list), "polygons and properties_list should have the same length"
coor, gt, coor_system = get_coor_in_space(image_filepath)
transform_mat = compute_image_to_epsg_mat(coor, gt)
# Convert polygons to ogr_polygons
ogr_polygons = create_ogr_polygons(polygons, transform_mat)
driver = ogr.GetDriverByName('Esri Shapefile')
ds = driver.CreateDataSource(output_shapefile_filepath)
# create the spatial reference, WGS84
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
layer = ds.CreateLayer('', None, ogr.wkbPolygon)
# Add one attribute
field_name_list = []
field_type_list = []
if properties_list is not None:
for properties in properties_list:
for (key, value) in properties.items():
if key not in field_name_list:
field_name_list.append(key)
field_type_list.append(type(value))
for (name, py_type) in zip(field_name_list, field_type_list):
if py_type == int:
ogr_type = ogr.OFTInteger
        elif py_type == float:
            ogr_type = ogr.OFTReal
elif py_type == str:
ogr_type = ogr.OFTString
else:
ogr_type = ogr.OFTInteger
layer.CreateField(ogr.FieldDefn(name, ogr_type))
defn = layer.GetLayerDefn()
for index in range(len(ogr_polygons)):
ogr_polygon = ogr_polygons[index]
if properties_list is not None:
properties = properties_list[index]
else:
properties = {}
# Create a new feature (attribute and geometry)
feat = ogr.Feature(defn)
for (key, value) in properties.items():
feat.SetField(key, value)
# Make a geometry, from Shapely object
geom = ogr.CreateGeometryFromWkt(ogr_polygon)
feat.SetGeometry(geom)
layer.CreateFeature(feat)
feat = geom = None # destroy these
# Save and close everything
ds = layer = feat = geom = None
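# Minimal usage sketch of save_shapefile_from_polygons(), added for illustration. The paths, the
# square polygon and its properties are hypothetical; polygons are expected as (n, 2) arrays of
# pixel coordinates of the reference image.
def _example_save_synthetic_polygon():
    image_filepath = "/path/to/image.tif"  # hypothetical path
    square = np.array([[10, 10], [10, 110], [110, 110], [110, 10], [10, 10]], dtype=np.float64)
    save_shapefile_from_polygons([square], image_filepath, "/path/to/square.shp",
                                 properties_list=[{"name": "example", "id": 1}])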
def indices_of_biggest_intersecting_polygon(polygon_list):
    """
    Assumes polygons which intersect follow each other in the order given by polygon_list.
    This avoids the huge complexity of looking for an intersection between every pair of polygons.
    For each cluster of mutually intersecting polygons, only the index of the biggest polygon is kept.
    :param polygon_list:
    :return: keep_index_list
    """
    def biggest_polygon_index(cluster):
        # Return the index (into polygon_list) of the biggest polygon of the cluster
        cluster_max_index = cluster[0]
        cluster_max_area = 0
        for cluster_polygon_index in cluster:
            cluster_polygon = polygon_list[cluster_polygon_index]
            area = polygon_utils.polygon_area(cluster_polygon)
            if cluster_max_area < area:
                cluster_max_area = area
                cluster_max_index = cluster_polygon_index
        return cluster_max_index
    keep_index_list = []
    current_cluster = []  # Indices of the polygons belonging to the current cluster (their union has one component)
    for index, polygon in enumerate(polygon_list):
        # First, check if polygon intersects with the current cluster:
        current_cluster_polygons = [polygon_list[i] for i in current_cluster]
        is_intersection = polygon_utils.check_intersection_with_polygons(polygon, current_cluster_polygons)
        if is_intersection or not current_cluster:
            # Add polygon to the cluster (the first polygon always starts a new cluster)
            current_cluster.append(index)
        else:
            # The current polygon starts the next cluster:
            # first keep the index of the biggest polygon of the finished cluster,
            keep_index_list.append(biggest_polygon_index(current_cluster))
            # then start a new cluster with the current polygon index
            current_cluster = [index]
    # Do not forget to flush the last cluster:
    if current_cluster:
        keep_index_list.append(biggest_polygon_index(current_cluster))
    return keep_index_list
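# Hedged example (added for illustration) of the expected behaviour of
# indices_of_biggest_intersecting_polygon(). The square coordinates are made up and the expected
# result assumes polygon_utils.polygon_area() and check_intersection_with_polygons() behave as
# their names suggest.
def _example_indices_of_biggest_intersecting_polygon():
    small = np.array([[0, 0], [0, 10], [10, 10], [10, 0], [0, 0]], dtype=np.float64)
    big = 3 * small  # contains `small`, so the two form one cluster
    far = small + 100  # intersects neither of the two others
    # Expected result: [1, 2] (keep `big` from the first cluster and `far` from its own cluster)
    return indices_of_biggest_intersecting_polygon([small, big, far])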
def get_pixelsize(filepath):
raster = gdal.Open(filepath)
gt = raster.GetGeoTransform()
pixelsize_x = gt[1]
pixelsize_y = -gt[5]
pixelsize = (pixelsize_x + pixelsize_y) / 2
return pixelsize
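# Minimal usage sketch (added for illustration): get_pixelsize() averages the absolute x and y
# ground sampling distances of a raster. The path below is a hypothetical placeholder.
def _example_get_pixelsize():
    pixelsize = get_pixelsize("/path/to/image.tif")  # hypothetical path
    return pixelsize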
def main():
main_dirpath = "/workspace/data/stereo_dataset/raw/leibnitz"
image_filepath = os.path.join(main_dirpath, "leibnitz_ortho_ref_RGB.tif")
input_shapefile_filepath = os.path.join(main_dirpath, "Leibnitz_buildings_ref.shp")
output_shapefile_filepath = os.path.join(main_dirpath, "Leibnitz_buildings_ref.shifted.shp")
polygons, properties_list = get_polygons_from_shapefile(image_filepath, input_shapefile_filepath)
print(polygons[0])
print(properties_list[0])
# Add shift
shift = np.array([0, 0])
shifted_polygons = [polygon + shift for polygon in polygons]
print(shifted_polygons[0])
# Save shapefile
save_shapefile_from_polygons(shifted_polygons, image_filepath, output_shapefile_filepath, properties_list=properties_list)
if __name__ == "__main__":
main()
| 12,156 | 32.86351 | 187 | py |
mapalignment | mapalignment-master/projects/utils/math_utils.py | import numpy as np
import time
import sklearn.datasets
import skimage.transform
import python_utils
import image_utils
# if python_utils.module_exists("matplotlib.pyplot"):
# import matplotlib.pyplot as plt
CV2 = False
if python_utils.module_exists("cv2"):
import cv2
CV2 = True
# import multiprocessing
#
# import python_utils
#
# if python_utils.module_exists("joblib"):
# from joblib import Parallel, delayed
# JOBLIB = True
# else:
# JOBLIB = False
# def plot_field_map(field_map):
# from mpl_toolkits.mplot3d import Axes3D
#
# row = np.linspace(0, 1, field_map.shape[0])
# col = np.linspace(0, 1, field_map.shape[1])
# rr, cc = np.meshgrid(row, col, indexing='ij')
#
# fig = plt.figure(figsize=(18, 9))
# ax = fig.add_subplot(121, projection='3d')
# ax.plot_surface(rr, cc, field_map[:, :, 0], rstride=3, cstride=3, linewidth=1, antialiased=True)
#
# ax = fig.add_subplot(122, projection='3d')
# ax.plot_surface(rr, cc, field_map[:, :, 1], rstride=3, cstride=3, linewidth=1, antialiased=True)
#
# plt.show()
# --- Classes --- #
class DispFieldMapsPatchCreator:
def __init__(self, global_shape, patch_res, map_count, modes, gauss_mu_range, gauss_sig_scaling):
self.global_shape = global_shape
self.patch_res = patch_res
self.map_count = map_count
self.modes = modes
self.gauss_mu_range = gauss_mu_range
self.gauss_sig_scaling = gauss_sig_scaling
self.current_patch_index = -1
self.patch_boundingboxes = image_utils.compute_patch_boundingboxes(self.global_shape, stride=self.patch_res, patch_res=self.patch_res)
self.disp_maps = None
self.create_new_disp_maps()
def create_new_disp_maps(self):
print("DispFieldMapsPatchCreator.create_new_disp_maps()")
self.disp_maps = create_displacement_field_maps(self.global_shape, self.map_count, self.modes, self.gauss_mu_range, self.gauss_sig_scaling)
def get_patch(self):
self.current_patch_index += 1
if len(self.patch_boundingboxes) <= self.current_patch_index:
self.current_patch_index = 0
self.create_new_disp_maps()
patch_boundingbox = self.patch_boundingboxes[self.current_patch_index]
patch_disp_maps = self.disp_maps[:, patch_boundingbox[0]:patch_boundingbox[2], patch_boundingbox[1]:patch_boundingbox[3], :]
return patch_disp_maps
# --- --- #
def to_homogeneous(array):
new_array = np.ones((array.shape[0], array.shape[1] + 1), dtype=array.dtype)
new_array[..., :-1] = array
return new_array
def to_euclidian(array_homogeneous):
array = array_homogeneous[:, 0:2] / array_homogeneous[:, 2:3]
return array
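# Small sketch (added for illustration): to_homogeneous()/to_euclidian() convert between Euclidean
# and homogeneous 2D coordinates, e.g. to apply a 3x3 transform to an array of points. The identity
# matrix is used here, so the exact multiplication convention does not matter for this example.
def _example_homogeneous_round_trip():
    points = np.array([[1.0, 2.0], [3.0, 4.0]])
    transform_mat = np.eye(3)  # identity transform, for illustration only
    transformed = to_euclidian(to_homogeneous(points) @ transform_mat)
    # With the identity matrix the points are unchanged
    return np.allclose(points, transformed)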
def stretch(array):
mini = np.min(array)
maxi = np.max(array)
if maxi - mini:
array -= mini
array *= 2 / (maxi - mini)
array -= 1
return array
def crop_center(array, out_shape):
assert len(out_shape) == 2, "out_shape should be of length 2"
in_shape = np.array(array.shape[:2])
start = in_shape // 2 - (out_shape // 2)
out_array = array[start[0]:start[0] + out_shape[0], start[1]:start[1] + out_shape[1], ...]
return out_array
def multivariate_gaussian(pos, mu, sigma):
"""Return the multivariate Gaussian distribution on array pos.
pos is an array constructed by packing the meshed arrays of variables
x_1, x_2, x_3, ..., x_k into its _last_ dimension.
"""
n = mu.shape[0]
sigma_det = np.linalg.det(sigma)
sigma_inv = np.linalg.inv(sigma)
N = np.sqrt((2 * np.pi) ** n * sigma_det)
# This einsum call calculates (x-mu)T.sigma-1.(x-mu) in a vectorized
# way across all the input variables.
# print("\tStarting to create multivariate Gaussian")
# start = time.time()
# print((pos - mu).shape)
# print(sigma_inv.shape)
    try:
        fac = np.einsum('...k,kl,...l->...', pos - mu, sigma_inv, pos - mu, optimize=True)
    except TypeError:
        # Older numpy versions do not support the optimize keyword
        fac = np.einsum('...k,kl,...l->...', pos - mu, sigma_inv, pos - mu)
# print(fac.shape)
# end = time.time()
# print("\tFinished Gaussian in {}s".format(end - start))
return np.exp(-fac / 2) / N
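# Minimal sketch (added for illustration) evaluating multivariate_gaussian() on a small grid; the
# grid size, mean and covariance values are arbitrary.
def _example_multivariate_gaussian():
    row = np.linspace(0, 1, 8)
    col = np.linspace(0, 1, 8)
    rr, cc = np.meshgrid(row, col, indexing='ij')
    pos = np.stack([rr, cc], axis=2)  # shape (8, 8, 2): variables packed in the last dimension
    mu = np.array([0.5, 0.5])
    sigma = 0.05 * np.eye(2)
    values = multivariate_gaussian(pos, mu, sigma)  # shape (8, 8), peaks at the center of the grid
    return values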
def create_multivariate_gaussian_mixture_map(shape, mode_count, mu_range, sig_scaling):
shape = np.array(shape)
# print("Starting to create multivariate Gaussian mixture")
# main_start = time.time()
dim_count = 2
downsample_factor = 4
dtype = np.float32
mu_scale = mu_range[1] - mu_range[0]
    row = np.linspace(mu_range[0], mu_range[1], int(mu_scale * shape[0] / downsample_factor), dtype=dtype)
    col = np.linspace(mu_range[0], mu_range[1], int(mu_scale * shape[1] / downsample_factor), dtype=dtype)
rr, cc = np.meshgrid(row, col, indexing='ij')
grid = np.stack([rr, cc], axis=2)
mus = np.random.uniform(mu_range[0], mu_range[1], (mode_count, dim_count, 2)).astype(dtype)
# gams = np.random.rand(mode_count, dim_count, 2, 2).astype(dtype)
signs = np.random.choice([1, -1], size=(mode_count, dim_count))
# print("\tAdding gaussian mixtures one by one")
# start = time.time()
# if JOBLIB:
# # Parallel computing of multivariate gaussians
# inputs = range(8)
#
# def processInput(i):
# size = 10 * i + 2000
# a = np.random.random_sample((size, size))
# b = np.random.random_sample((size, size))
# n = np.dot(a, b)
# return n
#
# num_cores = multiprocessing.cpu_count()
# print("num_cores: {}".format(num_cores))
# # num_cores = 1
#
# results = Parallel(n_jobs=num_cores)(delayed(processInput)(i) for i in inputs)
# for result in results:
# print(result.shape)
#
# gaussian_mixture = np.zeros_like(grid)
# else:
gaussian_mixture = np.zeros_like(grid)
for mode_index in range(mode_count):
for dim in range(dim_count):
sig = (sig_scaling[1] - sig_scaling[0]) * sklearn.datasets.make_spd_matrix(2) + sig_scaling[0]
# sig = (sig_scaling[1] - sig_scaling[0]) * np.dot(gams[mode_index, dim], np.transpose(gams[mode_index, dim])) + sig_scaling[0]
sig = sig.astype(dtype)
multivariate_gaussian_grid = signs[mode_index, dim] * multivariate_gaussian(grid, mus[mode_index, dim], sig)
gaussian_mixture[:, :, dim] += multivariate_gaussian_grid
# end = time.time()
# print("\tFinished adding gaussian mixtures in {}s".format(end - start))
# squared_gaussian_mixture = np.square(gaussian_mixture)
# magnitude_disp_field_map = np.sqrt(squared_gaussian_mixture[:, :, 0] + squared_gaussian_mixture[:, :, 1])
# max_magnitude = magnitude_disp_field_map.max()
gaussian_mixture[:, :, 0] = stretch(gaussian_mixture[:, :, 0])
gaussian_mixture[:, :, 1] = stretch(gaussian_mixture[:, :, 1])
# Crop
gaussian_mixture = crop_center(gaussian_mixture, shape//downsample_factor)
# plot_field_map(gaussian_mixture)
# Upsample mixture
# gaussian_mixture = skimage.transform.rescale(gaussian_mixture, downsample_factor)
gaussian_mixture = skimage.transform.resize(gaussian_mixture, shape)
    # main_end = time.time()
# print("Finished multivariate Gaussian mixture in {}s".format(main_end - main_start))
return gaussian_mixture
def create_displacement_field_maps(shape, map_count, modes, gauss_mu_range, gauss_sig_scaling, seed=None):
if seed is not None:
np.random.seed(seed)
disp_field_maps_list = []
for disp_field_map_index in range(map_count):
disp_field_map_normed = create_multivariate_gaussian_mixture_map(shape,
modes,
gauss_mu_range,
gauss_sig_scaling)
disp_field_maps_list.append(disp_field_map_normed)
disp_field_maps = np.stack(disp_field_maps_list, axis=0)
return disp_field_maps
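# Hedged usage sketch (added for illustration): generating a small batch of random smooth
# displacement field maps, similar to what DispFieldMapsPatchCreator does above. The shape and
# parameter values are arbitrary.
def _example_create_displacement_field_maps():
    disp_field_maps = create_displacement_field_maps((64, 64), map_count=2, modes=4,
                                                     gauss_mu_range=[0, 1],
                                                     gauss_sig_scaling=[0.0, 0.002], seed=0)
    # Shape: (map_count, 64, 64, 2), each displacement channel stretched to [-1, 1]
    return disp_field_maps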
def get_h_mat(t, theta, scale_offset, shear, p):
"""
Computes the homography matrix given the parameters
See https://medium.com/uruvideo/dataset-augmentation-with-random-homographies-a8f4b44830d4
(fixed mistake in H_a)
:param t: 2D translation vector
:param theta: Scalar angle
:param scale_offset: 2D scaling vector
:param shear: 2D shearing vector
:param p: 2D projection vector
:return: h_mat: shape (3, 3)
"""
cos_theta = np.cos(theta)
sin_theta = np.sin(theta)
h_e = np.array([
[cos_theta, -sin_theta, t[0]],
[sin_theta, cos_theta, t[1]],
[0, 0, 1],
])
h_a = np.array([
[1 + scale_offset[0], shear[1], 0],
[shear[0], 1 + scale_offset[1], 0],
[0, 0, 1],
])
h_p = np.array([
[1, 0, 0],
[0, 1, 0],
[p[0], p[1], 1],
])
h_mat = h_e @ h_a @ h_p
return h_mat
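# Illustrative sketch (added, not part of the original code): building a homography for a small
# rotation plus translation, with no scaling, shearing or projective component.
def _example_get_h_mat():
    h_mat = get_h_mat(t=np.array([1.0, 2.0]), theta=np.pi / 8,
                      scale_offset=np.zeros(2), shear=np.zeros(2), p=np.zeros(2))
    # h_mat has shape (3, 3) and can be applied to homogeneous 2D points
    return h_mat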
if CV2:
def find_homography_4pt(src, dst):
"""
Estimates the homography that transforms src points into dst points.
Then converts the matrix representation into the 4 points representation.
:param src:
:param dst:
:return:
"""
h_mat, _ = cv2.findHomography(src, dst)
h_4pt = convert_h_mat_to_4pt(h_mat)
return h_4pt
def convert_h_mat_to_4pt(h_mat):
src_4pt = np.array([[
[-1, -1],
[1, -1],
[1, 1],
[-1, 1],
]], dtype=np.float64)
h_4pt = cv2.perspectiveTransform(src_4pt, h_mat)
return h_4pt
def convert_h_4pt_to_mat(h_4pt):
src_4pt = np.array([
[-1, -1],
[1, -1],
[1, 1],
[-1, 1],
], dtype=np.float32)
h_4pt = h_4pt.astype(np.float32)
h_mat = cv2.getPerspectiveTransform(src_4pt, h_4pt)
return h_mat
def field_map_to_image(field_map):
mag, ang = cv2.cartToPolar(field_map[..., 0], field_map[..., 1])
hsv = np.zeros((field_map.shape[0], field_map.shape[1], 3))
hsv[..., 0] = ang * 180 / np.pi / 2
hsv[..., 1] = 255
hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
hsv = hsv.astype(np.uint8)
rgb = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
return rgb
else:
def find_homography_4pt(src, dst):
print("cv2 is not available, the find_homography_4pt(src, dst) function cannot work!")
def convert_h_mat_to_4pt(h_mat):
print("cv2 is not available, the convert_h_mat_to_4pt(h_mat) function cannot work!")
def convert_h_4pt_to_mat(h_4pt):
print("cv2 is not available, the convert_h_4pt_to_mat(h_4pt) function cannot work!")
def field_map_to_image(field_map):
print("cv2 is not available, the field_map_to_image(field_map) function cannot work!")
def main():
shape = (220, 220)
mode_count = 30
mu_range = [0, 1]
sig_scaling = [0.0, 0.002]
create_multivariate_gaussian_mixture_map(shape, mode_count, mu_range, sig_scaling)
if __name__ == "__main__":
main()
| 11,176 | 31.873529 | 147 | py |
mapalignment | mapalignment-master/projects/utils/run_utils.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import time
import datetime
from jsmin import jsmin
import json
import random
import print_utils
import python_utils
# Stolen from Docker:
NAME_SET = set([
# Muhammad ibn Jābir al-Ḥarrānī al-Battānī was a founding father of astronomy. https://en.wikipedia.org/wiki/Mu%E1%B8%A5ammad_ibn_J%C4%81bir_al-%E1%B8%A4arr%C4%81n%C4%AB_al-Batt%C4%81n%C4%AB
"albattani",
# Frances E. Allen, became the first female IBM Fellow in 1989. In 2006, she became the first female recipient of the ACM's Turing Award. https://en.wikipedia.org/wiki/Frances_E._Allen
"allen",
# June Almeida - Scottish virologist who took the first pictures of the rubella virus - https://en.wikipedia.org/wiki/June_Almeida
"almeida",
# Maria Gaetana Agnesi - Italian mathematician, philosopher, theologian and humanitarian. She was the first woman to write a mathematics handbook and the first woman appointed as a Mathematics Professor at a University. https://en.wikipedia.org/wiki/Maria_Gaetana_Agnesi
"agnesi",
# Archimedes was a physicist, engineer and mathematician who invented too many things to list them here. https://en.wikipedia.org/wiki/Archimedes
"archimedes",
# Maria Ardinghelli - Italian translator, mathematician and physicist - https://en.wikipedia.org/wiki/Maria_Ardinghelli
"ardinghelli",
# Aryabhata - Ancient Indian mathematician-astronomer during 476-550 CE https://en.wikipedia.org/wiki/Aryabhata
"aryabhata",
# Wanda Austin - Wanda Austin is the President and CEO of The Aerospace Corporation, a leading architect for the US security space programs. https://en.wikipedia.org/wiki/Wanda_Austin
"austin",
# Charles Babbage invented the concept of a programmable computer. https://en.wikipedia.org/wiki/Charles_Babbage.
"babbage",
# Stefan Banach - Polish mathematician, was one of the founders of modern functional analysis. https://en.wikipedia.org/wiki/Stefan_Banach
"banach",
# John Bardeen co-invented the transistor - https://en.wikipedia.org/wiki/John_Bardeen
"bardeen",
# Jean Bartik, born Betty Jean Jennings, was one of the original programmers for the ENIAC computer. https://en.wikipedia.org/wiki/Jean_Bartik
"bartik",
# Laura Bassi, the world's first female professor https://en.wikipedia.org/wiki/Laura_Bassi
"bassi",
# Hugh Beaver, British engineer, founder of the Guinness Book of World Records https://en.wikipedia.org/wiki/Hugh_Beaver
"beaver",
# Alexander Graham Bell - an eminent Scottish-born scientist, inventor, engineer and innovator who is credited with inventing the first practical telephone - https://en.wikipedia.org/wiki/Alexander_Graham_Bell
"bell",
# Karl Friedrich Benz - a German automobile engineer. Inventor of the first practical motorcar. https://en.wikipedia.org/wiki/Karl_Benz
"benz",
# Homi J Bhabha - was an Indian nuclear physicist, founding director, and professor of physics at the Tata Institute of Fundamental Research. Colloquially known as "father of Indian nuclear programme"- https://en.wikipedia.org/wiki/Homi_J._Bhabha
"bhabha",
# Bhaskara II - Ancient Indian mathematician-astronomer whose work on calculus predates Newton and Leibniz by over half a millennium - https://en.wikipedia.org/wiki/Bh%C4%81skara_II#Calculus
"bhaskara",
# Elizabeth Blackwell - American doctor and first American woman to receive a medical degree - https://en.wikipedia.org/wiki/Elizabeth_Blackwell
"blackwell",
# Niels Bohr is the father of quantum theory. https://en.wikipedia.org/wiki/Niels_Bohr.
"bohr",
# Kathleen Booth, she's credited with writing the first assembly language. https://en.wikipedia.org/wiki/Kathleen_Booth
"booth",
# Anita Borg - Anita Borg was the founding director of the Institute for Women and Technology (IWT). https://en.wikipedia.org/wiki/Anita_Borg
"borg",
# Satyendra Nath Bose - He provided the foundation for Bose–Einstein statistics and the theory of the Bose–Einstein condensate. - https://en.wikipedia.org/wiki/Satyendra_Nath_Bose
"bose",
# Evelyn Boyd Granville - She was one of the first African-American woman to receive a Ph.D. in mathematics; she earned it in 1949 from Yale University. https://en.wikipedia.org/wiki/Evelyn_Boyd_Granville
"boyd",
# Brahmagupta - Ancient Indian mathematician during 598-670 CE who gave rules to compute with zero - https://en.wikipedia.org/wiki/Brahmagupta#Zero
"brahmagupta",
# Walter Houser Brattain co-invented the transistor - https://en.wikipedia.org/wiki/Walter_Houser_Brattain
"brattain",
# Emmett Brown invented time travel. https://en.wikipedia.org/wiki/Emmett_Brown (thanks Brian Goff)
"brown",
# Rachel Carson - American marine biologist and conservationist, her book Silent Spring and other writings are credited with advancing the global environmental movement. https://en.wikipedia.org/wiki/Rachel_Carson
"carson",
# Subrahmanyan Chandrasekhar - Astrophysicist known for his mathematical theory on different stages and evolution in structures of the stars. He has won nobel prize for physics - https://en.wikipedia.org/wiki/Subrahmanyan_Chandrasekhar
"chandrasekhar",
# Sergey Alexeyevich Chaplygin (Russian: Серге́й Алексе́евич Чаплы́гин; April 5, 1869 – October 8, 1942) was a Russian and Soviet physicist, mathematician, and mechanical engineer. He is known for mathematical formulas such as Chaplygin's equation and for a hypothetical substance in cosmology called Chaplygin gas, named after him. https://en.wikipedia.org/wiki/Sergey_Chaplygin
"chaplygin",
# Asima Chatterjee was an indian organic chemist noted for her research on vinca alkaloids, development of drugs for treatment of epilepsy and malaria - https://en.wikipedia.org/wiki/Asima_Chatterjee
"chatterjee",
# Pafnuty Chebyshev - Russian mathematitian. He is known fo his works on probability, statistics, mechanics, analytical geometry and number theory https://en.wikipedia.org/wiki/Pafnuty_Chebyshev
"chebyshev",
# Claude Shannon - The father of information theory and founder of digital circuit design theory. (https://en.wikipedia.org/wiki/Claude_Shannon)
"shannon",
# Joan Clarke - Bletchley Park code breaker during the Second World War who pioneered techniques that remained top secret for decades. Also an accomplished numismatist https://en.wikipedia.org/wiki/Joan_Clarke
"clarke",
# Jane Colden - American botanist widely considered the first female American botanist - https://en.wikipedia.org/wiki/Jane_Colden
"colden",
# Gerty Theresa Cori - American biochemist who became the third woman—and first American woman—to win a Nobel Prize in science, and the first woman to be awarded the Nobel Prize in Physiology or Medicine. Cori was born in Prague. https://en.wikipedia.org/wiki/Gerty_Cori
"cori",
# Seymour Roger Cray was an American electrical engineer and supercomputer architect who designed a series of computers that were the fastest in the world for decades. https://en.wikipedia.org/wiki/Seymour_Cray
"cray",
# This entry reflects a husband and wife team who worked together:
# Joan Curran was a Welsh scientist who developed radar and invented chaff, a radar countermeasure. https://en.wikipedia.org/wiki/Joan_Curran
# Samuel Curran was an Irish physicist who worked alongside his wife during WWII and invented the proximity fuse. https://en.wikipedia.org/wiki/Samuel_Curran
"curran",
# Marie Curie discovered radioactivity. https://en.wikipedia.org/wiki/Marie_Curie.
"curie",
# Charles Darwin established the principles of natural evolution. https://en.wikipedia.org/wiki/Charles_Darwin.
"darwin",
# Leonardo Da Vinci invented too many things to list here. https://en.wikipedia.org/wiki/Leonardo_da_Vinci.
"davinci",
# Edsger Wybe Dijkstra was a Dutch computer scientist and mathematical scientist. https://en.wikipedia.org/wiki/Edsger_W._Dijkstra.
"dijkstra",
# Donna Dubinsky - played an integral role in the development of personal digital assistants (PDAs) serving as CEO of Palm, Inc. and co-founding Handspring. https://en.wikipedia.org/wiki/Donna_Dubinsky
"dubinsky",
# Annie Easley - She was a leading member of the team which developed software for the Centaur rocket stage and one of the first African-Americans in her field. https://en.wikipedia.org/wiki/Annie_Easley
"easley",
# Thomas Alva Edison, prolific inventor https://en.wikipedia.org/wiki/Thomas_Edison
"edison",
# Albert Einstein invented the general theory of relativity. https://en.wikipedia.org/wiki/Albert_Einstein
"einstein",
# Gertrude Elion - American biochemist, pharmacologist and the 1988 recipient of the Nobel Prize in Medicine - https://en.wikipedia.org/wiki/Gertrude_Elion
"elion",
# Alexandra Asanovna Elbakyan (Russian: Алекса́ндра Аса́новна Элбакя́н) is a Kazakhstani graduate student, computer programmer, internet pirate in hiding, and the creator of the site Sci-Hub. Nature has listed her in 2016 in the top ten people that mattered in science, and Ars Technica has compared her to Aaron Swartz. - https://en.wikipedia.org/wiki/Alexandra_Elbakyan
"elbakyan",
# Douglas Engelbart gave the mother of all demos: https://en.wikipedia.org/wiki/Douglas_Engelbart
"engelbart",
# Euclid invented geometry. https://en.wikipedia.org/wiki/Euclid
"euclid",
# Leonhard Euler invented large parts of modern mathematics. https://de.wikipedia.org/wiki/Leonhard_Euler
"euler",
# Pierre de Fermat pioneered several aspects of modern mathematics. https://en.wikipedia.org/wiki/Pierre_de_Fermat
"fermat",
# Enrico Fermi invented the first nuclear reactor. https://en.wikipedia.org/wiki/Enrico_Fermi.
"fermi",
# Richard Feynman was a key contributor to quantum mechanics and particle physics. https://en.wikipedia.org/wiki/Richard_Feynman
"feynman",
# Benjamin Franklin is famous for his experiments in electricity and the invention of the lightning rod.
"franklin",
# Galileo was a founding father of modern astronomy, and faced politics and obscurantism to establish scientific truth. https://en.wikipedia.org/wiki/Galileo_Galilei
"galileo",
# William Henry "Bill" Gates III is an American business magnate, philanthropist, investor, computer programmer, and inventor. https://en.wikipedia.org/wiki/Bill_Gates
"gates",
# Adele Goldberg, was one of the designers and developers of the Smalltalk language. https://en.wikipedia.org/wiki/Adele_Goldberg_(computer_scientist)
"goldberg",
# Adele Goldstine, born Adele Katz, wrote the complete technical description for the first electronic digital computer, ENIAC. https://en.wikipedia.org/wiki/Adele_Goldstine
"goldstine",
# Shafi Goldwasser is a computer scientist known for creating theoretical foundations of modern cryptography. Winner of 2012 ACM Turing Award. https://en.wikipedia.org/wiki/Shafi_Goldwasser
"goldwasser",
# James Golick, all around gangster.
"golick",
# Jane Goodall - British primatologist, ethologist, and anthropologist who is considered to be the world's foremost expert on chimpanzees - https://en.wikipedia.org/wiki/Jane_Goodall
"goodall",
# Lois Haibt - American computer scientist, part of the team at IBM that developed FORTRAN - https://en.wikipedia.org/wiki/Lois_Haibt
"haibt",
# Margaret Hamilton - Director of the Software Engineering Division of the MIT Instrumentation Laboratory, which developed on-board flight software for the Apollo space program. https://en.wikipedia.org/wiki/Margaret_Hamilton_(scientist)
"hamilton",
# Stephen Hawking pioneered the field of cosmology by combining general relativity and quantum mechanics. https://en.wikipedia.org/wiki/Stephen_Hawking
"hawking",
# Werner Heisenberg was a founding father of quantum mechanics. https://en.wikipedia.org/wiki/Werner_Heisenberg
"heisenberg",
# Grete Hermann was a German philosopher noted for her philosophical work on the foundations of quantum mechanics. https://en.wikipedia.org/wiki/Grete_Hermann
"hermann",
# Jaroslav Heyrovský was the inventor of the polarographic method, father of the electroanalytical method, and recipient of the Nobel Prize in 1959. His main field of work was polarography. https://en.wikipedia.org/wiki/Jaroslav_Heyrovsk%C3%BD
"heyrovsky",
# Dorothy Hodgkin was a British biochemist, credited with the development of protein crystallography. She was awarded the Nobel Prize in Chemistry in 1964. https://en.wikipedia.org/wiki/Dorothy_Hodgkin
"hodgkin",
# Erna Schneider Hoover revolutionized modern communication by inventing a computerized telephone switching method. https://en.wikipedia.org/wiki/Erna_Schneider_Hoover
"hoover",
# Grace Hopper developed the first compiler for a computer programming language and is credited with popularizing the term "debugging" for fixing computer glitches. https://en.wikipedia.org/wiki/Grace_Hopper
"hopper",
# Frances Hugle, she was an American scientist, engineer, and inventor who contributed to the understanding of semiconductors, integrated circuitry, and the unique electrical principles of microscopic materials. https://en.wikipedia.org/wiki/Frances_Hugle
"hugle",
# Hypatia - Greek Alexandrine Neoplatonist philosopher in Egypt who was one of the earliest mothers of mathematics - https://en.wikipedia.org/wiki/Hypatia
"hypatia",
# Mary Jackson, American mathematician and aerospace engineer who earned the highest title within NASA's engineering department - https://en.wikipedia.org/wiki/Mary_Jackson_(engineer)
"jackson",
# Yeong-Sil Jang was a Korean scientist and astronomer during the Joseon Dynasty; he invented the first metal printing press and water gauge. https://en.wikipedia.org/wiki/Jang_Yeong-sil
"jang",
# Betty Jennings - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Jean_Bartik
"jennings",
# Mary Lou Jepsen, was the founder and chief technology officer of One Laptop Per Child (OLPC), and the founder of Pixel Qi. https://en.wikipedia.org/wiki/Mary_Lou_Jepsen
"jepsen",
# Katherine Coleman Goble Johnson - American physicist and mathematician contributed to the NASA. https://en.wikipedia.org/wiki/Katherine_Johnson
"johnson",
# Irène Joliot-Curie - French scientist who was awarded the Nobel Prize for Chemistry in 1935. Daughter of Marie and Pierre Curie. https://en.wikipedia.org/wiki/Ir%C3%A8ne_Joliot-Curie
"joliot",
# Karen Spärck Jones came up with the concept of inverse document frequency, which is used in most search engines today. https://en.wikipedia.org/wiki/Karen_Sp%C3%A4rck_Jones
"jones",
# A. P. J. Abdul Kalam - is an Indian scientist aka Missile Man of India for his work on the development of ballistic missile and launch vehicle technology - https://en.wikipedia.org/wiki/A._P._J._Abdul_Kalam
"kalam",
# Sergey Petrovich Kapitsa (Russian: Серге́й Петро́вич Капи́ца; 14 February 1928 – 14 August 2012) was a Russian physicist and demographer. He was best known as host of the popular and long-running Russian scientific TV show, Evident, but Incredible. His father was the Nobel laureate Soviet-era physicist Pyotr Kapitsa, and his brother was the geographer and Antarctic explorer Andrey Kapitsa. - https://en.wikipedia.org/wiki/Sergey_Kapitsa
"kapitsa",
# Susan Kare, created the icons and many of the interface elements for the original Apple Macintosh in the 1980s, and was an original employee of NeXT, working as the Creative Director. https://en.wikipedia.org/wiki/Susan_Kare
"kare",
# Mstislav Keldysh - a Soviet scientist in the field of mathematics and mechanics, academician of the USSR Academy of Sciences (1946), President of the USSR Academy of Sciences (1961–1975), three times Hero of Socialist Labor (1956, 1961, 1971), fellow of the Royal Society of Edinburgh (1968). https://en.wikipedia.org/wiki/Mstislav_Keldysh
"keldysh",
# Mary Kenneth Keller, Sister Mary Kenneth Keller became the first American woman to earn a PhD in Computer Science in 1965. https://en.wikipedia.org/wiki/Mary_Kenneth_Keller
"keller",
# Johannes Kepler, German astronomer known for his three laws of planetary motion - https://en.wikipedia.org/wiki/Johannes_Kepler
"kepler",
# Har Gobind Khorana - Indian-American biochemist who shared the 1968 Nobel Prize for Physiology - https://en.wikipedia.org/wiki/Har_Gobind_Khorana
"khorana",
# Jack Kilby invented silicone integrated circuits and gave Silicon Valley its name. - https://en.wikipedia.org/wiki/Jack_Kilby
"kilby",
# Maria Kirch - German astronomer and first woman to discover a comet - https://en.wikipedia.org/wiki/Maria_Margarethe_Kirch
"kirch",
# Donald Knuth - American computer scientist, author of "The Art of Computer Programming" and creator of the TeX typesetting system. https://en.wikipedia.org/wiki/Donald_Knuth
"knuth",
# Sophie Kowalevski - Russian mathematician responsible for important original contributions to analysis, differential equations and mechanics - https://en.wikipedia.org/wiki/Sofia_Kovalevskaya
"kowalevski",
# Marie-Jeanne de Lalande - French astronomer, mathematician and cataloguer of stars - https://en.wikipedia.org/wiki/Marie-Jeanne_de_Lalande
"lalande",
# Hedy Lamarr - Actress and inventor. The principles of her work are now incorporated into modern Wi-Fi, CDMA and Bluetooth technology. https://en.wikipedia.org/wiki/Hedy_Lamarr
"lamarr",
# Leslie B. Lamport - American computer scientist. Lamport is best known for his seminal work in distributed systems and was the winner of the 2013 Turing Award. https://en.wikipedia.org/wiki/Leslie_Lamport
"lamport",
# Mary Leakey - British paleoanthropologist who discovered the first fossilized Proconsul skull - https://en.wikipedia.org/wiki/Mary_Leakey
"leakey",
# Henrietta Swan Leavitt - she was an American astronomer who discovered the relation between the luminosity and the period of Cepheid variable stars. https://en.wikipedia.org/wiki/Henrietta_Swan_Leavitt
"leavitt",
# Daniel Lewin - Mathematician, Akamai co-founder, soldier, 9/11 victim-- Developed optimization techniques for routing traffic on the internet. Died attempting to stop the 9-11 hijackers. https://en.wikipedia.org/wiki/Daniel_Lewin
"lewin",
# Ruth Lichterman - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Ruth_Teitelbaum
"lichterman",
# Barbara Liskov - co-developed the Liskov substitution principle. Liskov was also the winner of the Turing Prize in 2008. - https://en.wikipedia.org/wiki/Barbara_Liskov
"liskov",
# Ada Lovelace invented the first algorithm. https://en.wikipedia.org/wiki/Ada_Lovelace (thanks James Turnbull)
"lovelace",
# Auguste and Louis Lumière - the first filmmakers in history - https://en.wikipedia.org/wiki/Auguste_and_Louis_Lumi%C3%A8re
"lumiere",
# Mahavira - Ancient Indian mathematician during 9th century AD who discovered basic algebraic identities - https://en.wikipedia.org/wiki/Mah%C4%81v%C4%ABra_(mathematician)
"mahavira",
# Maria Mayer - American theoretical physicist and Nobel laureate in Physics for proposing the nuclear shell model of the atomic nucleus - https://en.wikipedia.org/wiki/Maria_Mayer
"mayer",
# John McCarthy invented LISP: https://en.wikipedia.org/wiki/John_McCarthy_(computer_scientist)
"mccarthy",
# Barbara McClintock - a distinguished American cytogeneticist, 1983 Nobel Laureate in Physiology or Medicine for discovering transposons. https://en.wikipedia.org/wiki/Barbara_McClintock
"mcclintock",
# Malcolm McLean invented the modern shipping container: https://en.wikipedia.org/wiki/Malcom_McLean
"mclean",
# Kay McNulty - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Kathleen_Antonelli
"mcnulty",
# Dmitri Mendeleev - a chemist and inventor. He formulated the Periodic Law, created a farsighted version of the periodic table of elements, and used it to correct the properties of some already discovered elements and also to predict the properties of eight elements yet to be discovered. https://en.wikipedia.org/wiki/Dmitri_Mendeleev
"mendeleev",
# Lise Meitner - Austrian/Swedish physicist who was involved in the discovery of nuclear fission. The element meitnerium is named after her - https://en.wikipedia.org/wiki/Lise_Meitner
"meitner",
# Carla Meninsky, was the game designer and programmer for Atari 2600 games Dodge 'Em and Warlords. https://en.wikipedia.org/wiki/Carla_Meninsky
"meninsky",
# Johanna Mestorf - German prehistoric archaeologist and first female museum director in Germany - https://en.wikipedia.org/wiki/Johanna_Mestorf
"mestorf",
# Marvin Minsky - Pioneer in Artificial Intelligence, co-founder of the MIT's AI Lab, won the Turing Award in 1969. https://en.wikipedia.org/wiki/Marvin_Minsky
"minsky",
# Maryam Mirzakhani - an Iranian mathematician and the first woman to win the Fields Medal. https://en.wikipedia.org/wiki/Maryam_Mirzakhani
"mirzakhani",
# Samuel Morse - contributed to the invention of a single-wire telegraph system based on European telegraphs and was a co-developer of the Morse code - https://en.wikipedia.org/wiki/Samuel_Morse
"morse",
# Ian Murdock - founder of the Debian project - https://en.wikipedia.org/wiki/Ian_Murdock
"murdock",
# John von Neumann - todays computer architectures are based on the von Neumann architecture. https://en.wikipedia.org/wiki/Von_Neumann_architecture
"neumann",
# Isaac Newton invented classic mechanics and modern optics. https://en.wikipedia.org/wiki/Isaac_Newton
"newton",
# Florence Nightingale, more prominently known as a nurse, was also the first female member of the Royal Statistical Society and a pioneer in statistical graphics https://en.wikipedia.org/wiki/Florence_Nightingale#Statistics_and_sanitary_reform
"nightingale",
# Alfred Nobel - a Swedish chemist, engineer, innovator, and armaments manufacturer (inventor of dynamite) - https://en.wikipedia.org/wiki/Alfred_Nobel
"nobel",
# Emmy Noether, German mathematician. Noether's Theorem is named after her. https://en.wikipedia.org/wiki/Emmy_Noether
"noether",
# Poppy Northcutt. Poppy Northcutt was the first woman to work as part of NASA’s Mission Control. http://www.businessinsider.com/poppy-northcutt-helped-apollo-astronauts-2014-12?op=1
"northcutt",
# Robert Noyce invented silicone integrated circuits and gave Silicon Valley its name. - https://en.wikipedia.org/wiki/Robert_Noyce
"noyce",
# Panini - Ancient Indian linguist and grammarian from 4th century CE who worked on the world's first formal system - https://en.wikipedia.org/wiki/P%C4%81%E1%B9%87ini#Comparison_with_modern_formal_systems
"panini",
# Ambroise Pare invented modern surgery. https://en.wikipedia.org/wiki/Ambroise_Par%C3%A9
"pare",
# Louis Pasteur discovered vaccination, fermentation and pasteurization. https://en.wikipedia.org/wiki/Louis_Pasteur.
"pasteur",
# Cecilia Payne-Gaposchkin was an astronomer and astrophysicist who, in 1925, proposed in her Ph.D. thesis an explanation for the composition of stars in terms of the relative abundances of hydrogen and helium. https://en.wikipedia.org/wiki/Cecilia_Payne-Gaposchkin
"payne",
# Radia Perlman is a software designer and network engineer and most famous for her invention of the spanning-tree protocol (STP). https://en.wikipedia.org/wiki/Radia_Perlman
"perlman",
# Rob Pike was a key contributor to Unix, Plan 9, the X graphic system, utf-8, and the Go programming language. https://en.wikipedia.org/wiki/Rob_Pike
"pike",
# Henri Poincaré made fundamental contributions in several fields of mathematics. https://en.wikipedia.org/wiki/Henri_Poincar%C3%A9
"poincare",
# Laura Poitras is a director and producer whose work, made possible by open source crypto tools, advances the causes of truth and freedom of information by reporting disclosures by whistleblowers such as Edward Snowden. https://en.wikipedia.org/wiki/Laura_Poitras
"poitras",
# Tat’yana Avenirovna Proskuriakova (Russian: Татья́на Авени́ровна Проскуряко́ва) (January 23 [O.S. January 10] 1909 – August 30, 1985) was a Russian-American Mayanist scholar and archaeologist who contributed significantly to the deciphering of Maya hieroglyphs, the writing system of the pre-Columbian Maya civilization of Mesoamerica. https://en.wikipedia.org/wiki/Tatiana_Proskouriakoff
"proskuriakova",
# Claudius Ptolemy - a Greco-Egyptian writer of Alexandria, known as a mathematician, astronomer, geographer, astrologer, and poet of a single epigram in the Greek Anthology - https://en.wikipedia.org/wiki/Ptolemy
"ptolemy",
# C. V. Raman - Indian physicist who won the Nobel Prize in 1930 for proposing the Raman effect. - https://en.wikipedia.org/wiki/C._V._Raman
"raman",
# Srinivasa Ramanujan - Indian mathematician and autodidact who made extraordinary contributions to mathematical analysis, number theory, infinite series, and continued fractions. - https://en.wikipedia.org/wiki/Srinivasa_Ramanujan
"ramanujan",
# Sally Kristen Ride was an American physicist and astronaut. She was the first American woman in space, and the youngest American astronaut. https://en.wikipedia.org/wiki/Sally_Ride
"ride",
# Rita Levi-Montalcini - Won Nobel Prize in Physiology or Medicine jointly with colleague Stanley Cohen for the discovery of nerve growth factor (https://en.wikipedia.org/wiki/Rita_Levi-Montalcini)
"montalcini",
# Dennis Ritchie - co-creator of UNIX and the C programming language. - https://en.wikipedia.org/wiki/Dennis_Ritchie
"ritchie",
# Wilhelm Conrad Röntgen - German physicist who was awarded the first Nobel Prize in Physics in 1901 for the discovery of X-rays (Röntgen rays). https://en.wikipedia.org/wiki/Wilhelm_R%C3%B6ntgen
"roentgen",
# Rosalind Franklin - British biophysicist and X-ray crystallographer whose research was critical to the understanding of DNA - https://en.wikipedia.org/wiki/Rosalind_Franklin
"rosalind",
# Meghnad Saha - Indian astrophysicist best known for his development of the Saha equation, used to describe chemical and physical conditions in stars - https://en.wikipedia.org/wiki/Meghnad_Saha
"saha",
# Jean E. Sammet developed FORMAC, the first widely used computer language for symbolic manipulation of mathematical formulas. https://en.wikipedia.org/wiki/Jean_E._Sammet
"sammet",
# Carol Shaw - Originally an Atari employee, Carol Shaw is said to be the first female video game designer. https://en.wikipedia.org/wiki/Carol_Shaw_(video_game_designer)
"shaw",
# Dame Stephanie "Steve" Shirley - Founded a software company in 1962 employing women working from home. https://en.wikipedia.org/wiki/Steve_Shirley
"shirley",
# William Shockley co-invented the transistor - https://en.wikipedia.org/wiki/William_Shockley
"shockley",
# Françoise Barré-Sinoussi - French virologist and Nobel Prize Laureate in Physiology or Medicine; her work was fundamental in identifying HIV as the cause of AIDS. https://en.wikipedia.org/wiki/Fran%C3%A7oise_Barr%C3%A9-Sinoussi
"sinoussi",
# Betty Snyder - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Betty_Holberton
"snyder",
# Frances Spence - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Frances_Spence
"spence",
# Richard Matthew Stallman - the founder of the Free Software movement, the GNU project, the Free Software Foundation, and the League for Programming Freedom. He also invented the concept of copyleft to protect the ideals of this movement, and enshrined this concept in the widely-used GPL (General Public License) for software. https://en.wikiquote.org/wiki/Richard_Stallman
"stallman",
# Lina Solomonovna Stern (or Shtern; Russian: Лина Соломоновна Штерн; 26 August 1878 – 7 March 1968) was a Soviet biochemist, physiologist and humanist whose medical discoveries saved thousands of lives at the fronts of World War II. She is best known for her pioneering work on blood–brain barrier, which she described as hemato-encephalic barrier in 1921. https://en.wikipedia.org/wiki/Lina_Stern
"shtern",
# Michael Stonebraker is a database research pioneer and architect of Ingres, Postgres, VoltDB and SciDB. Winner of 2014 ACM Turing Award. https://en.wikipedia.org/wiki/Michael_Stonebraker
"stonebraker",
# Janese Swanson (with others) developed the first of the Carmen Sandiego games. She went on to found Girl Tech. https://en.wikipedia.org/wiki/Janese_Swanson
"swanson",
# Aaron Swartz was influential in creating RSS, Markdown, Creative Commons, Reddit, and much of the internet as we know it today. He was devoted to freedom of information on the web. https://en.wikiquote.org/wiki/Aaron_Swartz
"swartz",
# Bertha Swirles was a theoretical physicist who made a number of contributions to early quantum theory. https://en.wikipedia.org/wiki/Bertha_Swirles
"swirles",
# Valentina Tereshkova is a russian engineer, cosmonaut and politician. She was the first woman flying to space in 1963. In 2013, at the age of 76, she offered to go on a one-way mission to mars. https://en.wikipedia.org/wiki/Valentina_Tereshkova
"tereshkova",
# Nikola Tesla invented the AC electric system and every gadget ever used by a James Bond villain. https://en.wikipedia.org/wiki/Nikola_Tesla
"tesla",
# Ken Thompson - co-creator of UNIX and the C programming language - https://en.wikipedia.org/wiki/Ken_Thompson
"thompson",
# Linus Torvalds invented Linux and Git. https://en.wikipedia.org/wiki/Linus_Torvalds
"torvalds",
# Alan Turing was a founding father of computer science. https://en.wikipedia.org/wiki/Alan_Turing.
"turing",
# Varahamihira - Ancient Indian mathematician who discovered trigonometric formulae during 505-587 CE - https://en.wikipedia.org/wiki/Var%C4%81hamihira#Contributions
"varahamihira",
# Dorothy Vaughan was a NASA mathematician and computer programmer on the SCOUT launch vehicle program that put America's first satellites into space - https://en.wikipedia.org/wiki/Dorothy_Vaughan
"vaughan",
# Sir Mokshagundam Visvesvaraya - is a notable Indian engineer. He is a recipient of the Indian Republic's highest honour, the Bharat Ratna, in 1955. On his birthday, 15 September is celebrated as Engineer's Day in India in his memory - https://en.wikipedia.org/wiki/Visvesvaraya
"visvesvaraya",
# Christiane Nüsslein-Volhard - German biologist, won Nobel Prize in Physiology or Medicine in 1995 for research on the genetic control of embryonic development. https://en.wikipedia.org/wiki/Christiane_N%C3%BCsslein-Volhard
"volhard",
# Cédric Villani - French mathematician, won Fields Medal, Fermat Prize and Poincaré Price for his work in differential geometry and statistical mechanics. https://en.wikipedia.org/wiki/C%C3%A9dric_Villani
"villani",
# Marlyn Wescoff - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Marlyn_Meltzer
"wescoff",
# Andrew Wiles - Notable British mathematician who proved the enigmatic Fermat's Last Theorem - https://en.wikipedia.org/wiki/Andrew_Wiles
"wiles",
# Roberta Williams, did pioneering work in graphical adventure games for personal computers, particularly the King's Quest series. https://en.wikipedia.org/wiki/Roberta_Williams
"williams",
# Sophie Wilson designed the first Acorn Micro-Computer and the instruction set for ARM processors. https://en.wikipedia.org/wiki/Sophie_Wilson
"wilson",
# Jeannette Wing - co-developed the Liskov substitution principle. - https://en.wikipedia.org/wiki/Jeannette_Wing
"wing",
# Steve Wozniak invented the Apple I and Apple II. https://en.wikipedia.org/wiki/Steve_Wozniak
"wozniak",
# The Wright brothers, Orville and Wilbur - credited with inventing and building the world's first successful airplane and making the first controlled, powered and sustained heavier-than-air human flight - https://en.wikipedia.org/wiki/Wright_brothers
"wright",
# Rosalyn Sussman Yalow - Rosalyn Sussman Yalow was an American medical physicist, and a co-winner of the 1977 Nobel Prize in Physiology or Medicine for development of the radioimmunoassay technique. https://en.wikipedia.org/wiki/Rosalyn_Sussman_Yalow
"yalow",
# Ada Yonath - an Israeli crystallographer, the first woman from the Middle East to win a Nobel prize in the sciences. https://en.wikipedia.org/wiki/Ada_Yonath
"yonath",
# Nikolay Yegorovich Zhukovsky (Russian: Никола́й Его́рович Жуко́вский, January 17 1847 – March 17, 1921) was a Russian scientist, mathematician and engineer, and a founding father of modern aero- and hydrodynamics. Whereas contemporary scientists scoffed at the idea of human flight, Zhukovsky was the first to undertake the study of airflow. He is often called the Father of Russian Aviation. https://en.wikipedia.org/wiki/Nikolay_Yegorovich_Zhukovsky
"zhukovsky",
])
def setup_run_dir(runs_dirpath, run_name=None, new_run=False, check_exists=False):
"""
If new_run is True, creates a new directory:
If run_name is None, generate a random name
else build the created directory name with run_name
If new_run is False, return an existing directory:
if run_name is None, return the last created directory (from timestamp)
else return the last created directory (from timestamp) whose name starts with run_name,
            if that does not exist and check_exists is False, create a new run with run_name,
            if check_exists is True, raise an error.
    Special case: if there are no existing runs, new_run is not taken into account and the function behaves as if new_run were True.
:param runs_dirpath: Parent directory path of all the runs
:param run_name:
:param new_run:
:param check_exists:
:return: Run directory path. The directory name is in the form "run_name | timestamp"
"""
    # Create runs directory if it does not exist
if not os.path.exists(runs_dirpath):
os.makedirs(runs_dirpath)
existing_run_dirnames = os.listdir(runs_dirpath)
    if new_run or len(existing_run_dirnames) == 0:
if run_name is not None:
# Create another directory name for the run, with its name starting with run_name
name_timestamped = create_name_timestamped(run_name)
else:
# Create another directory name for the run, excluding the existing names
existing_run_names = [existing_run_dirname.split(" | ")[0] for existing_run_dirname in
existing_run_dirnames]
name_timestamped = create_free_name_timestamped(exclude_list=existing_run_names)
current_run_dirpath = os.path.join(runs_dirpath, name_timestamped)
os.mkdir(current_run_dirpath)
else:
if run_name is not None:
# Pick run dir based on run_name
filtered_existing_run_dirnames = [existing_run_dirname for existing_run_dirname in existing_run_dirnames if
existing_run_dirname.split(" | ")[0] == run_name]
if filtered_existing_run_dirnames:
filtered_existing_run_timestamps = [filtered_existing_run_dirname.split(" | ")[1] for
filtered_existing_run_dirname in
filtered_existing_run_dirnames]
filtered_last_index = filtered_existing_run_timestamps.index(max(filtered_existing_run_timestamps))
current_run_dirname = filtered_existing_run_dirnames[filtered_last_index]
else:
if check_exists:
raise FileNotFoundError("Run '{}' does not exist.".format(run_name))
else:
return setup_run_dir(runs_dirpath, run_name=run_name, new_run=True)
else:
# Pick last run dir based on timestamp
existing_run_timestamps = [existing_run_dirname.split(" | ")[1] for existing_run_dirname in
existing_run_dirnames]
last_index = existing_run_timestamps.index(max(existing_run_timestamps))
current_run_dirname = existing_run_dirnames[last_index]
current_run_dirpath = os.path.join(runs_dirpath, current_run_dirname)
return current_run_dirpath
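# Hedged usage sketch of setup_run_dir() (added for illustration; the runs directory path is a
# hypothetical placeholder).
def _example_setup_run_dir():
    runs_dirpath = "/path/to/runs"  # hypothetical path
    # Create a brand new run directory named e.g. "my_experiment | 2018-01-01 12:00:00":
    new_run_dirpath = setup_run_dir(runs_dirpath, run_name="my_experiment", new_run=True)
    # Later, resume the most recent run whose name starts with "my_experiment":
    resumed_run_dirpath = setup_run_dir(runs_dirpath, run_name="my_experiment")
    return new_run_dirpath, resumed_run_dirpath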
def create_name_timestamped(name):
timestamp = time.time()
formatted_timestamp = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
name_timestamped = name + " | " + formatted_timestamp
return name_timestamped
def create_free_name_timestamped(exclude_list=None):
if exclude_list is not None:
names = list(NAME_SET - set(exclude_list))
else:
names = list(NAME_SET)
    assert 0 < len(names), "In create_free_name_timestamped(), all possible names have been used. Cannot create a new name without a collision! Delete some runs to continue..."
sorted_names = sorted(names)
name = sorted_names[0]
name_timestamped = create_name_timestamped(name)
return name_timestamped
def setup_run_subdirs(run_dir, logs_dirname="logs", checkpoints_dirname="checkpoints"):
logs_dir = os.path.join(run_dir, logs_dirname)
checkpoints_dir = os.path.join(run_dir, checkpoints_dirname)
if not os.path.exists(logs_dir):
os.makedirs(logs_dir)
if not os.path.exists(checkpoints_dir):
os.makedirs(checkpoints_dir)
return logs_dir, checkpoints_dir
def wipe_run_subdirs(run_dir, logs_dirname="logs", checkpoints_dirname="checkpoints"):
logs_dir = os.path.join(run_dir, logs_dirname)
checkpoints_dir = os.path.join(run_dir, checkpoints_dirname)
python_utils.wipe_dir(logs_dir)
python_utils.wipe_dir(checkpoints_dir)
def save_config(config, config_dirpath):
filepath = os.path.join(config_dirpath, 'config.json')
with open(filepath, 'w') as outfile:
json.dump(config, outfile)
# shutil.copyfile(os.path.join(project_dir, "config.py"), os.path.join(current_logs_dir, "config.py"))
def load_config(config_name="config", config_dirpath=""):
config_filepath = os.path.join(config_dirpath, config_name + ".json")
try:
with open(config_filepath, 'r') as f:
minified = jsmin(f.read())
config = json.loads(minified)
return config
except FileNotFoundError:
if config_name == "config" and config_dirpath == "":
print_utils.print_warning(
"WARNING: the default config file was not found....")
return None
else:
print_utils.print_warning(
"WARNING: config file {} was not found, opening default config file config.json instead.".format(
config_filepath))
return load_config()
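# Minimal sketch (added for illustration): loading a JSON config (comments stripped by jsmin) and
# saving it into a run directory. The config name and directories are hypothetical placeholders.
def _example_config_round_trip():
    config = load_config(config_name="config.test", config_dirpath="configs")  # hypothetical files
    if config is not None:
        save_config(config, config_dirpath="/path/to/run_dir")  # hypothetical directory
    return config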
| 39,862 | 59.216012 | 457 | py |
mapalignment | mapalignment-master/data/mapping_challenge_dataset/read.py | import sys
sys.path.append("../utils")
import visualization
from pycocotools.coco import COCO
from pycocotools import mask as cocomask
import numpy as np
import skimage.io as io
import matplotlib.pyplot as plt
import pylab
import random
import os
# --- Params --- #
FOLD_LIST = ["train", "val"]
IMAGES_DIRPATH_FORMAT = "{}/images" # var: fold
ANNOTATIONS_FILEPATH_FORMAT = "{}/annotation.json" # var: fold
# ANNOTATIONS_FILEPATH_FORMAT = "{}/annotation-small.json" # var: fold
PIXELSIZE = 0.3 # This is a guess, as that information is unknown
# --- --- #
def swap_coords(polygon):
polygon_new = polygon.copy()
polygon_new[..., 0] = polygon[..., 1]
polygon_new[..., 1] = polygon[..., 0]
return polygon_new
class Reader:
def __init__(self, raw_dirpath, fold):
assert fold in FOLD_LIST, "Input fold={} should be in FOLD_LIST={}".format(fold, FOLD_LIST)
self.images_dirpath = os.path.join(raw_dirpath, IMAGES_DIRPATH_FORMAT.format(fold))
self.annotations_filepath = os.path.join(raw_dirpath, ANNOTATIONS_FILEPATH_FORMAT.format(fold))
self.coco = COCO(self.annotations_filepath)
self.category_id_list = self.coco.loadCats(self.coco.getCatIds())
self.image_id_list = self.coco.getImgIds(catIds=self.coco.getCatIds())
def load_image(self, image_id):
img = self.coco.loadImgs(image_id)[0]
image_filepath = os.path.join(self.images_dirpath, img["file_name"])
image = io.imread(image_filepath)
image_metadata = {
"filepath": image_filepath,
"pixelsize": PIXELSIZE
}
return image, image_metadata
def load_polygons(self, image_id):
annotation_ids = self.coco.getAnnIds(imgIds=image_id)
annotation_list = self.coco.loadAnns(annotation_ids)
polygons_coords_list = []
for annotation in annotation_list:
flattened_segmentation_list = annotation["segmentation"]
flattened_arrays = np.array(flattened_segmentation_list)
arrays = np.reshape(flattened_arrays, (flattened_arrays.shape[0], -1, 2))
arrays = swap_coords(arrays)
array_list = []
for array in arrays:
array_list.append(array)
array_list.append(np.array([[np.nan, np.nan]]))
concatenated_array = np.concatenate(array_list, axis=0)
polygons_coords_list.append(concatenated_array)
return polygons_coords_list
def load_gt_data(self, image_id):
# Load image
image_array, image_metadata = self.load_image(image_id)
# Load polygon data
gt_polygons = self.load_polygons(image_id)
# TODO: remove
visualization.save_plot_image_polygons("polygons.png", image_array, [], gt_polygons, [])
# TODO end
return image_array, image_metadata, gt_polygons
def main():
raw_dirpath = "raw"
fold = "train"
reader = Reader(raw_dirpath, fold)
image_id = reader.image_id_list[1]
image_array, image_metadata, gt_polygons = reader.load_gt_data(image_id)
print(image_array.shape)
print(image_metadata)
print(gt_polygons)
if __name__ == "__main__":
main()
| 3,220 | 29.102804 | 103 | py |
mapalignment | mapalignment-master/data/AerialImageDataset/convert_npy_to_shp.py | import os.path
import sys
import read
FILE_DIRNAME = os.getcwd()
sys.path.append(os.path.join(FILE_DIRNAME, "../../projects/utils"))
import geo_utils
# --- Params --- #
RAW_DIRPATH = os.path.join(FILE_DIRNAME, "raw")
IMAGE_INFO_LIST = [
{
"city": "bloomington",
"numbers": list(range(1, 37)),
},
{
"city": "bellingham",
"numbers": list(range(1, 37)),
},
{
"city": "innsbruck",
"numbers": list(range(1, 37)),
},
{
"city": "sfo",
"numbers": list(range(1, 37)),
},
{
"city": "tyrol-e",
"numbers": list(range(1, 37)),
},
{
"city": "austin",
"numbers": list(range(1, 37)),
},
{
"city": "chicago",
"numbers": list(range(1, 37)),
},
{
"city": "kitsap",
"numbers": list(range(1, 37)),
},
{
"city": "tyrol-w",
"numbers": list(range(1, 37)),
},
{
"city": "vienna",
"numbers": list(range(1, 37)),
},
]
POLYGON_DIR_NAME = "aligned_gt_polygons_1"
SHAPEFILE_FILENAME_FORMAT = read.IMAGE_NAME_FORMAT + ".shp" # City name, number
# --- --- #
def convert_npy_to_shp(raw_dirpath, polygon_dirname, city, number, shapefile_filename_format):
# --- Load data --- #
# Load polygon data
image_filepath = read.get_image_filepath(raw_dirpath, city, number)
polygons = read.load_polygons(raw_dirpath, polygon_dirname, city, number)
if polygons is not None:
output_shapefile_filepath = read.get_polygons_filepath(raw_dirpath, polygon_dirname, city, number, overwrite_polygons_filename_format=shapefile_filename_format)
geo_utils.save_shapefile_from_polygons(polygons, image_filepath, output_shapefile_filepath)
def main():
print("Converting polygons from {}".format(POLYGON_DIR_NAME))
for image_info in IMAGE_INFO_LIST:
for number in image_info["numbers"]:
print("Converting polygons of city {}, number {}".format(image_info["city"], number))
convert_npy_to_shp(RAW_DIRPATH, POLYGON_DIR_NAME, image_info["city"], number, SHAPEFILE_FILENAME_FORMAT)
if __name__ == "__main__":
main()
| 2,189 | 25.071429 | 168 | py |
mapalignment | mapalignment-master/data/AerialImageDataset/fetch_gt_polygons.py | import sys
import os
import numpy as np
sys.path.append("../../../projects/utils")
import python_utils
import polygon_utils
import geo_utils
# --- Params --- #
DIR_PATH_LIST = ["./raw/train", "./raw/test"]
IMAGE_DIR_NAME = "images"
IMAGE_EXTENSION = "tif"
GT_POLYGONS_DIR_NAME = "gt_polygons"
# --- --- #
def load_gt_polygons(image_filepath):
gt_polygons = geo_utils.get_polygons_from_osm(image_filepath, tag="building")
if len(gt_polygons):
gt_polygons = polygon_utils.polygons_remove_holes(gt_polygons) # TODO: Remove
# Remove redundant vertices
gt_polygons = polygon_utils.simplify_polygons(gt_polygons, tolerance=1)
return gt_polygons
return None
def fetch_from_images_in_directory(dir_path):
print("Fetching for images in {}".format(dir_path))
gt_polygons_dir_path = os.path.join(dir_path, GT_POLYGONS_DIR_NAME)
if not os.path.exists(gt_polygons_dir_path):
os.makedirs(gt_polygons_dir_path)
images_dir_path = os.path.join(dir_path, IMAGE_DIR_NAME)
image_filepaths = python_utils.get_filepaths(images_dir_path, IMAGE_EXTENSION)
for i, image_filepath in enumerate(image_filepaths):
image_basename = os.path.basename(image_filepath)
image_name = os.path.splitext(image_basename)[0]
print("Fetching for image {}. Progress: {}/{}".format(image_name, i+1, len(image_filepaths)))
gt_polygons_path = os.path.join(gt_polygons_dir_path, "{}.npy".format(image_name))
if not os.path.exists(gt_polygons_path):
gt_polygons = load_gt_polygons(image_filepath)
if gt_polygons is not None:
np.save(gt_polygons_path, gt_polygons)
else:
print("Fetching did not return any polygons. Skip this one.")
else:
print("GT polygons data was already fetched, skip this one. (Delete the gt_polygons file to re-fetch)")
def main():
for dir_path in DIR_PATH_LIST:
fetch_from_images_in_directory(dir_path)
if __name__ == "__main__":
main()
| 2,053 | 29.656716 | 115 | py |
mapalignment | mapalignment-master/data/AerialImageDataset/read.py | import os.path
import csv
import sys
import numpy as np
import skimage.io
CITY_METADATA_DICT = {
"bloomington": {
"fold": "test",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
"bellingham": {
"fold": "test",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
"innsbruck": {
"fold": "test",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
"sfo": {
"fold": "test",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
"tyrol-e": {
"fold": "test",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
"austin": {
"fold": "train",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
"chicago": {
"fold": "train",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
"kitsap": {
"fold": "train",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
"tyrol-w": {
"fold": "train",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
"vienna": {
"fold": "train",
"pixelsize": 0.3,
"numbers": list(range(1, 37)),
},
}
IMAGE_DIR_NAME = "images"
IMAGE_NAME_FORMAT = "{city}{number}"
IMAGE_FILENAME_FORMAT = IMAGE_NAME_FORMAT + ".tif" # City name, number
POLYGON_DIRNAME = "gt_polygons"
POLYGONS_FILENAME_FORMAT = IMAGE_NAME_FORMAT + ".npy" # City name, number
def get_tile_info_list():
tile_info_list = []
for city, info in CITY_METADATA_DICT.items():
for number in info["numbers"]:
image_info = {
"city": city,
"number": number,
}
tile_info_list.append(image_info)
return tile_info_list
def get_image_filepath(raw_dirpath, city, number):
fold = CITY_METADATA_DICT[city]["fold"]
filename = IMAGE_FILENAME_FORMAT.format(city=city, number=number)
filepath = os.path.join(raw_dirpath, fold, IMAGE_DIR_NAME, filename)
return filepath
def get_polygons_filepath(raw_dirpath, polygon_dirname, city, number, overwrite_polygons_filename_format=None):
if overwrite_polygons_filename_format is None:
polygons_filename_format = POLYGONS_FILENAME_FORMAT
else:
polygons_filename_format = overwrite_polygons_filename_format
fold = CITY_METADATA_DICT[city]["fold"]
filename = polygons_filename_format.format(city=city, number=number)
filepath = os.path.join(raw_dirpath, fold, polygon_dirname, filename)
return filepath
def load_image(raw_dirpath, city, number):
filepath = get_image_filepath(raw_dirpath, city, number)
image_array = skimage.io.imread(filepath)
    # The following is written this way for future image-specific addition of metadata:
image_metadata = {
"filepath": filepath,
"pixelsize": CITY_METADATA_DICT[city]["pixelsize"]
}
return image_array, image_metadata
def load_polygons(raw_dirpath, polygon_dirname, city, number):
filepath = get_polygons_filepath(raw_dirpath, polygon_dirname, city, number)
try:
gt_polygons = np.load(filepath)
except FileNotFoundError:
print("City {}, number {} does not have gt polygons in directory {}".format(city, number, polygon_dirname))
gt_polygons = None
return gt_polygons
def load_gt_data(raw_dirpath, city, number, overwrite_polygon_dir_name=None):
if overwrite_polygon_dir_name is None:
polygon_dirname = POLYGON_DIRNAME
else:
polygon_dirname = overwrite_polygon_dir_name
# Load image
image_array, image_metadata = load_image(raw_dirpath, city, number)
# Load polygon data
gt_polygons = load_polygons(raw_dirpath, polygon_dirname, city, number)
return image_array, image_metadata, gt_polygons
def main():
raw_dirpath = "raw"
city = "bloomington"
number = 1
image_array, image_metadata, gt_polygons = load_gt_data(raw_dirpath, city, number)
print(image_array.shape)
print(image_metadata)
print(gt_polygons)
if __name__ == "__main__":
main()
| 4,120 | 26.657718 | 115 | py |
mapalignment | mapalignment-master/data/bradbury_buildings_roads_height_dataset/download.py | import os.path
import urllib.request
import zipfile
BASE_URL = 'https://figshare.com/collections/Aerial_imagery_object_identification_dataset_for_building_and_road_detection_and_building_height_estimation/3290519'
FILE_URL_FORMAT = "https://ndownloader.figshare.com/articles/{}/versions/1"
FILE_METADATA_LIST = [
{
"dirname": "Arlington",
"id": "3485204",
},
{
"dirname": "Atlanta",
"id": "3504308",
},
{
"dirname": "Austin",
"id": "3504317",
},
{
"dirname": "DC",
"id": "3504320",
},
{
"dirname": "NewHaven",
"id": "3504323",
},
{
"dirname": "NewYork",
"id": "3504326",
},
{
"dirname": "Norfolk",
"id": "3504347",
},
{
"dirname": "SanFrancisco",
"id": "3504350",
},
{
"dirname": "Seekonk",
"id": "3504359",
},
{
"dirname": "Data_Description",
"id": "3504413",
}
]
DOWNLOAD_DIRPATH = "raw"
if not os.path.exists(DOWNLOAD_DIRPATH):
os.makedirs(DOWNLOAD_DIRPATH)
for file_metadata in FILE_METADATA_LIST:
dirname = file_metadata["dirname"]
id = file_metadata["id"]
download_dirpath = os.path.join(DOWNLOAD_DIRPATH, dirname)
zip_download_dirpath = download_dirpath + ".zip"
if not os.path.exists(download_dirpath):
print("Downloading {}".format(dirname))
urllib.request.urlretrieve(FILE_URL_FORMAT.format(id), zip_download_dirpath)
zip_ref = zipfile.ZipFile(zip_download_dirpath, 'r')
os.makedirs(download_dirpath)
zip_ref.extractall(download_dirpath)
zip_ref.close()
os.remove(zip_download_dirpath)
else:
print("Directory {} already exists so skip download (remove directory if you want to download again)")
| 1,842 | 24.957746 | 161 | py |
mapalignment | mapalignment-master/data/bradbury_buildings_roads_height_dataset/read.py | import os.path
import csv
import numpy as np
import skimage.io
CITY_METADATA_DICT = {
"Arlington": {
"pixelsize": 0.3,
"numbers": [1, 2, 3],
},
"Atlanta": {
"pixelsize": 0.1524,
"numbers": [1, 2, 3],
},
"Austin": {
"pixelsize": 0.1524,
"numbers": [1, 2, 3],
},
"DC": {
"pixelsize": 0.16,
"numbers": [1, 2],
},
"NewHaven": {
"pixelsize": 0.3,
"numbers": [1, 2],
},
"NewYork": {
"pixelsize": 0.1524,
"numbers": [1, 2, 3],
},
"Norfolk": {
"pixelsize": 0.3048,
"numbers": [1, 2, 3],
},
"SanFrancisco": {
"pixelsize": 0.3,
"numbers": [1, 2, 3],
},
"Seekonk": {
"pixelsize": 0.3,
"numbers": [1, 2, 3],
},
}
DIRNAME_FORMAT = "{city}" # City name
IMAGE_NAME_FORMAT = "{city}_{number:02d}"
IMAGE_FILENAME_EXTENSION = ".tif"
POLYGONS_FILENAME_EXTENSION = "_buildingCoord.csv"
def get_tile_info_list():
tile_info_list = []
for city, info in CITY_METADATA_DICT.items():
for number in info["numbers"]:
image_info = {
"city": city,
"number": number,
}
tile_info_list.append(image_info)
return tile_info_list
def get_image_filepath(raw_dirpath, city, number):
dirname = DIRNAME_FORMAT.format(city=city)
image_name = IMAGE_NAME_FORMAT.format(city=city, number=number)
filename = image_name + IMAGE_FILENAME_EXTENSION
filepath = os.path.join(raw_dirpath, dirname, filename)
return filepath
def get_polygons_filepath(raw_dirpath, city, number, polygons_filename_extension):
dirname = DIRNAME_FORMAT.format(city=city)
image_name = IMAGE_NAME_FORMAT.format(city=city, number=number)
filename = image_name + polygons_filename_extension
filepath = os.path.join(raw_dirpath, dirname, filename)
return filepath
def load_image(raw_dirpath, city, number):
filepath = get_image_filepath(raw_dirpath, city, number)
image_array = skimage.io.imread(filepath)
image_array = np.array(image_array, dtype=np.float64) / 255
if image_array.shape[2] == 4:
if city == "SanFrancisco":
# San Francisco needs special treatment because its transparent pixels are white!
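            # Multiplying the RGB channels by alpha turns those fully transparent
            # (white) pixels black instead of leaving them white when the alpha
            # channel is simply dropped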
alpha = image_array[:, :, 3:4]
image_array = image_array[:, :, :3] * alpha # Apply alpha in 4th channel (IR channel) if present
else:
image_array = image_array[:, :, :3]
image_array = np.round(image_array * 255).astype(np.uint8)
    # The following is written this way for future image-specific addition of metadata:
image_metadata = {
"filepath": filepath,
"pixelsize": CITY_METADATA_DICT[city]["pixelsize"]
}
return image_array, image_metadata
def read_csv_row(row):
# print("Polygon: {}".format(row[1]))
coord_list = []
for item in row[3:]:
try:
item_float = float(item)
coord_list.append(item_float)
except ValueError:
pass
coord_array = np.array(coord_list, dtype=np.float64)
coord_array = np.reshape(coord_array, (-1, 2))
# Switch from xy coordinates to ij:
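    # The right-hand tuple is evaluated before assignment, so the .copy() on the
    # first column keeps it from being overwritten during the in-place swap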
coord_array[:, 0], coord_array[:, 1] = coord_array[:, 1], coord_array[:, 0].copy()
# polygon_utils.plot_polygon(gt_polygon_coords, color=None, draw_labels=False, label_direction=1)
# gt_polygon_coords_no_nans = np.reshape(gt_polygon_coords[~np.isnan(gt_polygon_coords)], (-1, 2))
return coord_array
def load_csv(filepath):
polygons_coords_list = []
with open(filepath, 'r') as coords_csv:
csv_reader = csv.reader(coords_csv, delimiter=',')
for row_index, row in enumerate(csv_reader):
if row_index != 0: # Skip header
polygon_coords = read_csv_row(row)
polygons_coords_list.append(polygon_coords)
return polygons_coords_list
def load_polygons_from_npy(filepath):
try:
polygons = np.load(filepath)
except FileNotFoundError:
print("Filepath {} does not exist".format(filepath))
polygons = None
return polygons
def load_polygons(raw_dirpath, city, number, polygons_filename_extension):
filepath = get_polygons_filepath(raw_dirpath, city, number, polygons_filename_extension)
_, file_extension = os.path.splitext(filepath)
if file_extension == ".csv":
return load_csv(filepath)
elif file_extension == ".npy":
return load_polygons_from_npy(filepath)
else:
print("WARNING: file extension {} is not handled by this script. Use .csv or .npy.".format(file_extension))
return None
def load_gt_data(raw_dirpath, city, number, overwrite_polygons_filename_extension=None):
if overwrite_polygons_filename_extension is None:
polygons_filename_extension = POLYGONS_FILENAME_EXTENSION
else:
polygons_filename_extension = overwrite_polygons_filename_extension
# Load image
image_array, image_metadata = load_image(raw_dirpath, city, number)
# Load CSV data
gt_polygons = load_polygons(raw_dirpath, city, number, polygons_filename_extension)
# TODO: remove
# sys.path.append("../utils")
# import visualization
# gt_polygons_filepath = get_polygons_filepath(raw_dirpath, POLYGONS_FILENAME_FORMAT, city, number)
# visualization.save_plot_image_polygons(gt_polygons_filepath + ".polygons.png", image_array, [], gt_polygons, [])
# TODO end
return image_array, image_metadata, gt_polygons
def main():
raw_dirpath = "raw"
city = "Atlanta"
number = 1
image_array, image_metadata, gt_polygons = load_gt_data(raw_dirpath, city, number)
print(image_array.shape)
print(image_metadata)
print(gt_polygons)
if __name__ == "__main__":
main()
| 5,849 | 29.952381 | 118 | py |
cowrie | cowrie-master/setup.py | #!/usr/bin/env python
from setuptools import setup
try:
import twisted
except ImportError:
raise SystemExit("twisted not found. Make sure you "
"have installed the Twisted core package.")
setup(
packages=["cowrie", "twisted"],
include_package_data=True,
package_dir={"": "src"},
package_data={"": ["*.md"]},
use_incremental=True,
scripts=["bin/fsctl", "bin/asciinema", "bin/cowrie", "bin/createfs", "bin/playlog"],
setup_requires=["incremental", "click"],
)
import sys
def refresh_plugin_cache():
from twisted.plugin import IPlugin, getPlugins
list(getPlugins(IPlugin))
| 642 | 22.814815 | 88 | py |
cowrie | cowrie-master/src/twisted/plugins/cowrie_plugin.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import annotations
import os
import sys
from typing import ClassVar
from collections.abc import Callable
from zope.interface import implementer, provider
from incremental import Version
from twisted._version import __version__ as __twisted_version__
from twisted.application import service
from twisted.application.service import IServiceMaker
from twisted.cred import portal
from twisted.internet import reactor
from twisted.logger import ILogObserver, globalLogPublisher
from twisted.plugin import IPlugin
from twisted.python import log, usage
import cowrie.core.checkers
import cowrie.core.realm
import cowrie.ssh.factory
import cowrie.telnet.factory
from backend_pool.pool_server import PoolServerFactory
from cowrie import core
from cowrie._version import __version__ as __cowrie_version__
from cowrie.core.config import CowrieConfig
from cowrie.core.utils import create_endpoint_services, get_endpoints_from_section
from cowrie.pool_interface.handler import PoolHandler
if __twisted_version__ < Version("Twisted", 20, 0, 0):
raise ImportError(
"Your version of Twisted is too old. Please ensure your virtual environment is set up correctly."
)
class Options(usage.Options):
"""
This defines commandline options and flags
"""
    # The '-c' parameter is currently ignored
optParameters: list[str] = []
optFlags: list[list[str]] = [["help", "h", "Display this help and exit."]]
@provider(ILogObserver)
def importFailureObserver(event: dict) -> None:
if "failure" in event and event["failure"].type is ImportError:
log.err(
"ERROR: %s. Please run `pip install -U -r requirements.txt` "
"from Cowrie's install directory and virtualenv to install "
"the new dependency" % event["failure"].value.message
)
globalLogPublisher.addObserver(importFailureObserver)
@implementer(IServiceMaker, IPlugin)
class CowrieServiceMaker:
tapname: ClassVar[str] = "cowrie"
description: ClassVar[str] = "She sells sea shells by the sea shore."
options = Options
output_plugins: list[Callable] = []
topService: service.Service
def __init__(self) -> None:
self.pool_handler = None
# ssh is enabled by default
self.enableSSH: bool = CowrieConfig.getboolean("ssh", "enabled", fallback=True)
# telnet is disabled by default
self.enableTelnet: bool = CowrieConfig.getboolean(
"telnet", "enabled", fallback=False
)
# pool is disabled by default, but need to check this setting in case user only wants to run the pool
self.pool_only: bool = CowrieConfig.getboolean(
"backend_pool", "pool_only", fallback=False
)
def makeService(self, options: dict) -> service.Service:
"""
Construct a TCPServer from a factory defined in Cowrie.
"""
if options["help"] is True:
print( # noqa: T201
"""Usage: twistd [options] cowrie [-h]
Options:
-h, --help print this help message.
Makes a Cowrie SSH/Telnet honeypot.
"""
)
sys.exit(1)
if os.name == "posix" and os.getuid() == 0:
print("ERROR: You must not run cowrie as root!") # noqa: T201
sys.exit(1)
tz: str = CowrieConfig.get("honeypot", "timezone", fallback="UTC")
# `system` means use the system time zone
if tz != "system":
os.environ["TZ"] = tz
log.msg("Python Version {}".format(str(sys.version).replace("\n", "")))
log.msg(
"Twisted Version {}.{}.{}".format(
__twisted_version__.major,
__twisted_version__.minor,
__twisted_version__.micro,
)
)
log.msg(
"Cowrie Version {}.{}.{}".format(
__cowrie_version__.major,
__cowrie_version__.minor,
__cowrie_version__.micro,
)
)
# check configurations
if not self.enableTelnet and not self.enableSSH and not self.pool_only:
print( # noqa: T201
"ERROR: You must at least enable SSH or Telnet, or run the backend pool"
)
sys.exit(1)
# Load output modules
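        # Config sections named output_<engine> map to cowrie.output.<engine> modules;
        # each enabled plugin is instantiated and its emit() registered as a log observer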
self.output_plugins = []
for x in CowrieConfig.sections():
if not x.startswith("output_"):
continue
if CowrieConfig.getboolean(x, "enabled") is False:
continue
engine: str = x.split("_")[1]
try:
output = __import__(
f"cowrie.output.{engine}", globals(), locals(), ["output"]
).Output()
log.addObserver(output.emit)
self.output_plugins.append(output)
log.msg(f"Loaded output engine: {engine}")
except ImportError as e:
log.err(
f"Failed to load output engine: {engine} due to ImportError: {e}"
)
log.msg(
f"Please install the dependencies for {engine} listed in requirements-output.txt"
)
except Exception:
log.err()
log.msg(f"Failed to load output engine: {engine}")
self.topService = service.MultiService()
application = service.Application("cowrie")
self.topService.setServiceParent(application)
# initialise VM pool handling - only if proxy AND pool set to enabled, and pool is to be deployed here
# or also enabled if pool_only is true
backend_type: str = CowrieConfig.get("honeypot", "backend", fallback="shell")
proxy_backend: str = CowrieConfig.get("proxy", "backend", fallback="simple")
if (backend_type == "proxy" and proxy_backend == "pool") or self.pool_only:
# in this case we need to set some kind of pool connection
local_pool: bool = (
CowrieConfig.get("proxy", "pool", fallback="local") == "local"
)
pool_host: str = CowrieConfig.get(
"proxy", "pool_host", fallback="127.0.0.1"
)
pool_port: int = CowrieConfig.getint("proxy", "pool_port", fallback=6415)
if local_pool or self.pool_only:
# start a pool locally
f = PoolServerFactory()
f.tac = self # type: ignore
listen_endpoints = get_endpoints_from_section(
CowrieConfig, "backend_pool", 6415
)
create_endpoint_services(reactor, self.topService, listen_endpoints, f)
pool_host = "127.0.0.1" # force use of local interface
# either way (local or remote) we set up a client to the pool
# unless this instance has no SSH and Telnet (pool only)
if (self.enableTelnet or self.enableSSH) and not self.pool_only:
self.pool_handler = PoolHandler(pool_host, pool_port, self) # type: ignore
else:
# we initialise the services directly
self.pool_ready()
return self.topService
def pool_ready(self) -> None:
backend: str = CowrieConfig.get("honeypot", "backend", fallback="shell")
# this method is never called if self.pool_only is False,
# since we do not start the pool handler that would call it
if self.enableSSH:
factory = cowrie.ssh.factory.CowrieSSHFactory(backend, self.pool_handler)
factory.tac = self # type: ignore
factory.portal = portal.Portal(core.realm.HoneyPotRealm())
factory.portal.registerChecker(core.checkers.HoneypotPublicKeyChecker())
factory.portal.registerChecker(core.checkers.HoneypotPasswordChecker())
if CowrieConfig.getboolean("ssh", "auth_none_enabled", fallback=False):
factory.portal.registerChecker(core.checkers.HoneypotNoneChecker())
if CowrieConfig.has_section("ssh"):
listen_endpoints = get_endpoints_from_section(CowrieConfig, "ssh", 2222)
else:
listen_endpoints = get_endpoints_from_section(
CowrieConfig, "honeypot", 2222
)
create_endpoint_services(
reactor, self.topService, listen_endpoints, factory
)
if self.enableTelnet:
f = cowrie.telnet.factory.HoneyPotTelnetFactory(backend, self.pool_handler)
f.tac = self
f.portal = portal.Portal(core.realm.HoneyPotRealm())
f.portal.registerChecker(core.checkers.HoneypotPasswordChecker())
listen_endpoints = get_endpoints_from_section(CowrieConfig, "telnet", 2223)
create_endpoint_services(reactor, self.topService, listen_endpoints, f)
# Now construct an object which *provides* the relevant interfaces
# The name of this variable is irrelevant, as long as there is *some*
# name bound to a provider of IPlugin and IServiceMaker.
serviceMaker = CowrieServiceMaker()
| 10,607 | 38.434944 | 110 | py |
cowrie | cowrie-master/src/cowrie/_version.py | """
Provides cowrie version information.
"""
# This file is auto-generated! Do not edit!
# Use `python -m incremental.update cowrie` to change this file.
from __future__ import annotations
from incremental import Version
__version__ = Version("cowrie", 2, 5, 0)
__all__: list[str] = ["__version__"]
| 303 | 20.714286 | 64 | py |
cowrie | cowrie-master/src/cowrie/__init__.py | # setup version
from ._version import __version__ as version
__version__: str = version.short()
| 97 | 18.6 | 44 | py |
cowrie | cowrie-master/src/cowrie/output/xmpp.py | from __future__ import annotations
import json
import string
from random import choice
from wokkel import muc
from wokkel.client import XMPPClient
from wokkel.xmppim import AvailablePresence
from twisted.application import service
from twisted.python import log
from twisted.words.protocols.jabber import jid
from twisted.words.protocols.jabber.jid import JID
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class XMPPLoggerProtocol(muc.MUCClient): # type: ignore
def __init__(self, rooms, server, nick):
muc.MUCClient.__init__(self)
self.server = rooms.host
self.jrooms = rooms
self._roomOccupantMap = {}
log.msg(rooms.user)
log.msg(rooms.host)
self.nick = nick
self.last = {}
self.activity = None
def connectionInitialized(self):
"""
The bot has connected to the xmpp server, now try to join the room.
"""
self.join(self.jrooms, self.nick)
def joinedRoom(self, room):
log.msg(f"Joined room {room.name}")
def connectionMade(self):
log.msg("Connected!")
# send initial presence
self.send(AvailablePresence())
def connectionLost(self, reason):
log.msg("Disconnected!")
def onMessage(self, msg):
pass
def receivedGroupChat(self, room, user, body):
pass
def receivedHistory(self, room, user, body, dely, frm=None):
pass
class Output(cowrie.core.output.Output):
"""
xmpp output
"""
def start(self):
server = CowrieConfig.get("output_xmpp", "server")
user = CowrieConfig.get("output_xmpp", "user")
password = CowrieConfig.get("output_xmpp", "password")
muc = CowrieConfig.get("output_xmpp", "muc")
resource = "".join([choice(string.ascii_letters) for i in range(8)])
jid = user + "/" + resource
application = service.Application("honeypot")
self.run(application, jid, password, JID(None, [muc, server, None]), server)
def run(self, application, jidstr, password, muc, server):
self.xmppclient = XMPPClient(JID(jidstr), password)
if CowrieConfig.getboolean("output_xmpp", "debug", fallback=False):
self.xmppclient.logTraffic = True
(user, host, resource) = jid.parse(jidstr)
self.muc = XMPPLoggerProtocol(muc, server, user + "-" + resource)
self.muc.setHandlerParent(self.xmppclient)
self.xmppclient.setServiceParent(application)
self.anonymous = True
self.xmppclient.startService()
def write(self, logentry):
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
elif i == "time":
del logentry[i]
msgJson = json.dumps(logentry, indent=5)
self.muc.groupChat(self.muc.jrooms, msgJson)
def stop(self):
self.xmppclient.stopService()
| 2,986 | 29.479592 | 84 | py |
cowrie | cowrie-master/src/cowrie/output/rethinkdblog.py | from __future__ import annotations
import time
from datetime import datetime
import rethinkdb as r
import cowrie.core.output
from cowrie.core.config import CowrieConfig
def iso8601_to_timestamp(value):
return time.mktime(datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ").timetuple())
RETHINK_DB_SEGMENT = "output_rethinkdblog"
class Output(cowrie.core.output.Output):
# noinspection PyAttributeOutsideInit
def start(self):
self.host = CowrieConfig.get(RETHINK_DB_SEGMENT, "host")
self.port = CowrieConfig.getint(RETHINK_DB_SEGMENT, "port")
self.db = CowrieConfig.get(RETHINK_DB_SEGMENT, "db")
self.table = CowrieConfig.get(RETHINK_DB_SEGMENT, "table")
self.password = CowrieConfig.get(RETHINK_DB_SEGMENT, "password", raw=True)
self.connection = r.connect(
host=self.host, port=self.port, db=self.db, password=self.password
)
try:
r.db_create(self.db).run(self.connection)
r.db(self.db).table_create(self.table).run(self.connection)
except r.RqlRuntimeError:
pass
def stop(self):
self.connection.close()
def write(self, logentry):
for i in list(logentry.keys()):
# remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
if "timestamp" in logentry:
logentry["timestamp"] = iso8601_to_timestamp(logentry["timestamp"])
r.table(self.table).insert(logentry).run(self.connection)
| 1,526 | 30.163265 | 85 | py |
cowrie | cowrie-master/src/cowrie/output/reversedns.py | from __future__ import annotations
from functools import lru_cache
import ipaddress
from twisted.internet import defer
from twisted.names import client, error
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
Output plugin used for reverse DNS lookup
"""
timeout: list[int] = [3]
def start(self):
"""
Start Output Plugin
"""
self.timeout = [CowrieConfig.getint("output_reversedns", "timeout", fallback=3)]
def stop(self):
"""
Stop Output Plugin
"""
pass
def write(self, entry):
"""
Process log entry
"""
def processConnect(result):
"""
Create log messages for connect events
"""
if result is None:
return
payload = result[0][0].payload
log.msg(
eventid="cowrie.reversedns.connect",
session=entry["session"],
format="reversedns: PTR record for IP %(src_ip)s is %(ptr)s"
" ttl=%(ttl)i",
src_ip=entry["src_ip"],
ptr=str(payload.name),
ttl=payload.ttl,
)
def processForward(result):
"""
Create log messages for forward events
"""
if result is None:
return
payload = result[0][0].payload
log.msg(
eventid="cowrie.reversedns.forward",
session=entry["session"],
format="reversedns: PTR record for IP %(dst_ip)s is %(ptr)s"
" ttl=%(ttl)i",
dst_ip=entry["dst_ip"],
ptr=str(payload.name),
ttl=payload.ttl,
)
def cbError(failure):
if failure.type == defer.TimeoutError:
log.msg("reversedns: Timeout in DNS lookup")
elif failure.type == error.DNSNameError:
# DNSNameError is the NXDOMAIN response
log.msg("reversedns: No PTR record returned")
else:
log.msg("reversedns: Error in DNS lookup")
failure.printTraceback()
if entry["eventid"] == "cowrie.session.connect":
d = self.reversedns(entry["src_ip"])
if d is not None:
d.addCallback(processConnect)
d.addErrback(cbError)
elif entry["eventid"] == "cowrie.direct-tcpip.request":
d = self.reversedns(entry["dst_ip"])
if d is not None:
d.addCallback(processForward)
d.addErrback(cbError)
@lru_cache(maxsize=1000)
def reversedns(self, addr):
"""
Perform a reverse DNS lookup on an IP
Arguments:
addr -- IPv4 Address
"""
try:
ptr = ipaddress.ip_address(addr).reverse_pointer
except ValueError:
return None
d = client.lookupPointer(ptr, timeout=self.timeout)
return d
| 3,128 | 28.242991 | 88 | py |
cowrie | cowrie-master/src/cowrie/output/mysql.py | """
MySQL output connector. Writes audit logs to MySQL database
"""
from __future__ import annotations
from twisted.enterprise import adbapi
from twisted.internet import defer
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
# For exceptions: https://dev.mysql.com/doc/connector-python/en/connector-python-api-errors-error.html
import mysql.connector
class ReconnectingConnectionPool(adbapi.ConnectionPool):
"""
Reconnecting adbapi connection pool for MySQL.
This class improves on the solution posted at
http://www.gelens.org/2008/09/12/reinitializing-twisted-connectionpool/
by checking exceptions by error code and only disconnecting the current
connection instead of all of them.
    CR_CONN_HOST_ERROR: 2003: Can't connect to MySQL server (10061)
    CR_SERVER_GONE_ERROR: 2006: MySQL server has gone away
    CR_SERVER_LOST: 2013: Lost connection to MySQL server
    ER_LOCK_DEADLOCK: 1213: Deadlock found when trying to get lock
Also see:
http://twistedmatrix.com/pipermail/twisted-python/2009-July/020007.html
"""
def _runInteraction(self, interaction, *args, **kw):
try:
return adbapi.ConnectionPool._runInteraction(self, interaction, *args, **kw)
except mysql.connector.Error as e:
# except (MySQLdb.OperationalError, MySQLdb._exceptions.OperationalError) as e:
if e.errno not in (
mysql.connector.errorcode.CR_CONN_HOST_ERROR,
mysql.connector.errorcode.CR_SERVER_GONE_ERROR,
mysql.connector.errorcode.CR_SERVER_LOST,
mysql.connector.errorcode.ER_LOCK_DEADLOCK,
):
raise e
log.msg(f"output_mysql: got error {e!r}, retrying operation")
conn = self.connections.get(self.threadID())
self.disconnect(conn)
# Try the interaction again
return adbapi.ConnectionPool._runInteraction(self, interaction, *args, **kw)
class Output(cowrie.core.output.Output):
"""
MySQL output
"""
debug: bool = False
def start(self):
self.debug = CowrieConfig.getboolean("output_mysql", "debug", fallback=False)
port = CowrieConfig.getint("output_mysql", "port", fallback=3306)
try:
self.db = ReconnectingConnectionPool(
"mysql.connector",
host=CowrieConfig.get("output_mysql", "host"),
db=CowrieConfig.get("output_mysql", "database"),
user=CowrieConfig.get("output_mysql", "username"),
passwd=CowrieConfig.get("output_mysql", "password", raw=True),
port=port,
cp_min=1,
cp_max=1,
charset="utf8mb4",
cp_reconnect=True,
use_unicode=True,
)
# except (MySQLdb.Error, MySQLdb._exceptions.Error) as e:
except Exception as e:
log.msg(f"output_mysql: Error {e.args[0]}: {e.args[1]}")
def stop(self):
self.db.close()
def sqlerror(self, error):
"""
1146, "Table '...' doesn't exist"
1406, "Data too long for column '...' at row ..."
"""
if error.value.args[0] in (1146, 1406):
log.msg(f"output_mysql: MySQL Error: {error.value.args!r}")
log.msg(
"output_mysql: MySQL schema maybe misconfigured, doublecheck database!"
)
else:
log.msg(f"output_mysql: MySQL Error: {error.value.args!r}")
def simpleQuery(self, sql, args):
"""
Just run a deferred sql query, only care about errors
"""
if self.debug:
log.msg(f"output_mysql: MySQL query: {sql} {args!r}")
d = self.db.runQuery(sql, args)
d.addErrback(self.sqlerror)
@defer.inlineCallbacks
def write(self, entry):
if entry["eventid"] == "cowrie.session.connect":
if self.debug:
log.msg(
f"output_mysql: SELECT `id` FROM `sensors` WHERE `ip` = '{self.sensor}'"
)
r = yield self.db.runQuery(
f"SELECT `id` FROM `sensors` WHERE `ip` = '{self.sensor}'"
)
if r:
sensorid = r[0][0]
else:
if self.debug:
log.msg(
f"output_mysql: INSERT INTO `sensors` (`ip`) VALUES ('{self.sensor}')"
)
yield self.db.runQuery(
f"INSERT INTO `sensors` (`ip`) VALUES ('{self.sensor}')"
)
r = yield self.db.runQuery("SELECT LAST_INSERT_ID()")
sensorid = int(r[0][0])
self.simpleQuery(
"INSERT INTO `sessions` (`id`, `starttime`, `sensor`, `ip`) "
"VALUES (%s, FROM_UNIXTIME(%s), %s, %s)",
(entry["session"], entry["time"], sensorid, entry["src_ip"]),
)
elif entry["eventid"] == "cowrie.login.success":
self.simpleQuery(
"INSERT INTO `auth` (`session`, `success`, `username`, `password`, `timestamp`) "
"VALUES (%s, %s, %s, %s, FROM_UNIXTIME(%s))",
(
entry["session"],
1,
entry["username"],
entry["password"],
entry["time"],
),
)
elif entry["eventid"] == "cowrie.login.failed":
self.simpleQuery(
"INSERT INTO `auth` (`session`, `success`, `username`, `password`, `timestamp`) "
"VALUES (%s, %s, %s, %s, FROM_UNIXTIME(%s))",
(
entry["session"],
0,
entry["username"],
entry["password"],
entry["time"],
),
)
elif entry["eventid"] == "cowrie.session.params":
self.simpleQuery(
"INSERT INTO `params` (`session`, `arch`) VALUES (%s, %s)",
(entry["session"], entry["arch"]),
)
elif entry["eventid"] == "cowrie.command.input":
self.simpleQuery(
"INSERT INTO `input` (`session`, `timestamp`, `success`, `input`) "
"VALUES (%s, FROM_UNIXTIME(%s), %s , %s)",
(entry["session"], entry["time"], 1, entry["input"]),
)
elif entry["eventid"] == "cowrie.command.failed":
self.simpleQuery(
"INSERT INTO `input` (`session`, `timestamp`, `success`, `input`) "
"VALUES (%s, FROM_UNIXTIME(%s), %s , %s)",
(entry["session"], entry["time"], 0, entry["input"]),
)
elif entry["eventid"] == "cowrie.session.file_download":
self.simpleQuery(
"INSERT INTO `downloads` (`session`, `timestamp`, `url`, `outfile`, `shasum`) "
"VALUES (%s, FROM_UNIXTIME(%s), %s, %s, %s)",
(
entry["session"],
entry["time"],
entry.get("url", ""),
entry["outfile"],
entry["shasum"],
),
)
elif entry["eventid"] == "cowrie.session.file_download.failed":
self.simpleQuery(
"INSERT INTO `downloads` (`session`, `timestamp`, `url`, `outfile`, `shasum`) "
"VALUES (%s, FROM_UNIXTIME(%s), %s, %s, %s)",
(entry["session"], entry["time"], entry.get("url", ""), "NULL", "NULL"),
)
elif entry["eventid"] == "cowrie.session.file_upload":
self.simpleQuery(
"INSERT INTO `downloads` (`session`, `timestamp`, `url`, `outfile`, `shasum`) "
"VALUES (%s, FROM_UNIXTIME(%s), %s, %s, %s)",
(
entry["session"],
entry["time"],
"",
entry["outfile"],
entry["shasum"],
),
)
elif entry["eventid"] == "cowrie.session.input":
self.simpleQuery(
"INSERT INTO `input` (`session`, `timestamp`, `realm`, `input`) "
"VALUES (%s, FROM_UNIXTIME(%s), %s , %s)",
(entry["session"], entry["time"], entry["realm"], entry["input"]),
)
elif entry["eventid"] == "cowrie.client.version":
r = yield self.db.runQuery(
"SELECT `id` FROM `clients` WHERE `version` = %s",
(entry["version"],),
)
if r:
id = int(r[0][0])
else:
yield self.db.runQuery(
"INSERT INTO `clients` (`version`) VALUES (%s)",
(entry["version"],),
)
r = yield self.db.runQuery("SELECT LAST_INSERT_ID()")
id = int(r[0][0])
self.simpleQuery(
"UPDATE `sessions` SET `client` = %s WHERE `id` = %s",
(id, entry["session"]),
)
elif entry["eventid"] == "cowrie.client.size":
self.simpleQuery(
"UPDATE `sessions` SET `termsize` = %s WHERE `id` = %s",
("{}x{}".format(entry["width"], entry["height"]), entry["session"]),
)
elif entry["eventid"] == "cowrie.session.closed":
self.simpleQuery(
"UPDATE `sessions` "
"SET `endtime` = FROM_UNIXTIME(%s) "
"WHERE `id` = %s",
(entry["time"], entry["session"]),
)
elif entry["eventid"] == "cowrie.log.closed":
self.simpleQuery(
"INSERT INTO `ttylog` (`session`, `ttylog`, `size`) "
"VALUES (%s, %s, %s)",
(entry["session"], entry["ttylog"], entry["size"]),
)
elif entry["eventid"] == "cowrie.client.fingerprint":
self.simpleQuery(
"INSERT INTO `keyfingerprints` (`session`, `username`, `fingerprint`) "
"VALUES (%s, %s, %s)",
(entry["session"], entry["username"], entry["fingerprint"]),
)
elif entry["eventid"] == "cowrie.direct-tcpip.request":
self.simpleQuery(
"INSERT INTO `ipforwards` (`session`, `timestamp`, `dst_ip`, `dst_port`) "
"VALUES (%s, FROM_UNIXTIME(%s), %s, %s)",
(entry["session"], entry["time"], entry["dst_ip"], entry["dst_port"]),
)
elif entry["eventid"] == "cowrie.direct-tcpip.data":
self.simpleQuery(
"INSERT INTO `ipforwardsdata` (`session`, `timestamp`, `dst_ip`, `dst_port`, `data`) "
"VALUES (%s, FROM_UNIXTIME(%s), %s, %s, %s)",
(
entry["session"],
entry["time"],
entry["dst_ip"],
entry["dst_port"],
entry["data"],
),
)
| 11,175 | 37.143345 | 102 | py |
cowrie | cowrie-master/src/cowrie/output/telegram.py | # Simple Telegram Bot logger
import treq
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
telegram output
"""
def start(self):
self.bot_token = CowrieConfig.get("output_telegram", "bot_token")
self.chat_id = CowrieConfig.get("output_telegram", "chat_id")
def stop(self):
pass
def write(self, logentry):
for i in list(logentry.keys()):
# remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
logon_type = ""
# Prepare logon type
if "HoneyPotSSHTransport" in (logentry["system"].split(","))[0]:
logon_type = "SSH"
elif "CowrieTelnetTransport" in (logentry["system"].split(","))[0]:
logon_type = "Telnet"
# Prepare base message
msgtxt = "<strong>[Cowrie " + logentry["sensor"] + "]</strong>"
msgtxt += "\nEvent: " + logentry["eventid"]
msgtxt += "\nLogon type: " + logon_type
msgtxt += "\nSource: <code>" + logentry["src_ip"] + "</code>"
msgtxt += "\nSession: <code>" + logentry["session"] + "</code>"
if logentry["eventid"] == "cowrie.login.success":
msgtxt += "\nUsername: <code>" + logentry["username"] + "</code>"
msgtxt += "\nPassword: <code>" + logentry["password"] + "</code>"
self.send_message(msgtxt)
elif logentry["eventid"] in ["cowrie.command.failed", "cowrie.command.input"]:
msgtxt += "\nCommand: <pre>" + logentry["input"] + "</pre>"
self.send_message(msgtxt)
elif logentry["eventid"] == "cowrie.session.file_download":
msgtxt += "\nUrl: " + logentry.get("url", "")
self.send_message(msgtxt)
def send_message(self, message):
log.msg("Telegram plugin will try to call TelegramBot")
try:
treq.get(
"https://api.telegram.org/bot" + self.bot_token + "/sendMessage",
params=[
("chat_id", str(self.chat_id)),
("parse_mode", "HTML"),
("text", message),
],
)
except Exception:
log.msg("Telegram plugin request error")
| 2,326 | 34.8 | 86 | py |
cowrie | cowrie-master/src/cowrie/output/influx.py | from __future__ import annotations
import re
from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
influx output
"""
def start(self):
host = CowrieConfig.get("output_influx", "host", fallback="")
port = CowrieConfig.getint("output_influx", "port", fallback=8086)
ssl = CowrieConfig.getboolean("output_influx", "ssl", fallback=False)
self.client = None
try:
self.client = InfluxDBClient(host=host, port=port, ssl=ssl, verify_ssl=ssl)
except InfluxDBClientError as e:
log.msg(f"output_influx: I/O error({e.code}): '{e.message}'")
return
if self.client is None:
log.msg("output_influx: cannot instantiate client!")
return
if CowrieConfig.has_option(
"output_influx", "username"
) and CowrieConfig.has_option("output_influx", "password"):
username = CowrieConfig.get("output_influx", "username")
password = CowrieConfig.get("output_influx", "password", raw=True)
self.client.switch_user(username, password)
try:
dbname = CowrieConfig.get("output_influx", "database_name")
except Exception:
dbname = "cowrie"
retention_policy_duration_default = "12w"
retention_policy_name = dbname + "_retention_policy"
if CowrieConfig.has_option("output_influx", "retention_policy_duration"):
retention_policy_duration = CowrieConfig.get(
"output_influx", "retention_policy_duration"
)
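            # Accepted values are a plain integer followed by d, h, m or w,
            # e.g. "30d", "12w" or "90m"; anything else falls back to the default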
match = re.search(r"^\d+[dhmw]{1}$", retention_policy_duration)
if not match:
log.msg(
(
"output_influx: invalid retention policy."
"Using default '{}'.."
).format(retention_policy_duration)
)
retention_policy_duration = retention_policy_duration_default
else:
retention_policy_duration = retention_policy_duration_default
database_list = self.client.get_list_database()
dblist = [str(elem["name"]) for elem in database_list]
if dbname not in dblist:
self.client.create_database(dbname)
self.client.create_retention_policy(
retention_policy_name,
retention_policy_duration,
1,
database=dbname,
default=True,
)
else:
retention_policies_list = self.client.get_list_retention_policies(
database=dbname
)
rplist = [str(elem["name"]) for elem in retention_policies_list]
if retention_policy_name not in rplist:
self.client.create_retention_policy(
retention_policy_name,
retention_policy_duration,
1,
database=dbname,
default=True,
)
else:
self.client.alter_retention_policy(
retention_policy_name,
database=dbname,
duration=retention_policy_duration,
replication=1,
default=True,
)
self.client.switch_database(dbname)
def stop(self):
pass
def write(self, entry):
if self.client is None:
log.msg("output_influx: client object is not instantiated")
return
# event id
eventid = entry["eventid"]
# measurement init
m = {
"measurement": eventid.replace(".", "_"),
"tags": {"session": entry["session"], "src_ip": entry["src_ip"]},
"fields": {"sensor": self.sensor},
}
# event parsing
if eventid in ["cowrie.command.failed", "cowrie.command.input"]:
m["fields"].update(
{
"input": entry["input"],
}
)
elif eventid == "cowrie.session.connect":
m["fields"].update(
{
"protocol": entry["protocol"],
"src_port": entry["src_port"],
"dst_port": entry["dst_port"],
"dst_ip": entry["dst_ip"],
}
)
elif eventid in ["cowrie.login.success", "cowrie.login.failed"]:
m["fields"].update(
{
"username": entry["username"],
"password": entry["password"],
}
)
elif eventid == "cowrie.session.file_download":
m["fields"].update(
{
"shasum": entry.get("shasum"),
"url": entry.get("url"),
"outfile": entry.get("outfile"),
}
)
elif eventid == "cowrie.session.file_download.failed":
m["fields"].update({"url": entry.get("url")})
elif eventid == "cowrie.session.file_upload":
m["fields"].update(
{
"shasum": entry.get("shasum"),
"outfile": entry.get("outfile"),
}
)
elif eventid == "cowrie.session.closed":
m["fields"].update({"duration": entry["duration"]})
elif eventid == "cowrie.client.version":
m["fields"].update(
{
"version": ",".join(entry["version"]),
}
)
elif eventid == "cowrie.client.kex":
m["fields"].update(
{
"maccs": ",".join(entry["macCS"]),
"kexalgs": ",".join(entry["kexAlgs"]),
"keyalgs": ",".join(entry["keyAlgs"]),
"compcs": ",".join(entry["compCS"]),
"enccs": ",".join(entry["encCS"]),
}
)
elif eventid == "cowrie.client.size":
m["fields"].update(
{
"height": entry["height"],
"width": entry["width"],
}
)
elif eventid == "cowrie.client.var":
m["fields"].update(
{
"name": entry["name"],
"value": entry["value"],
}
)
elif eventid == "cowrie.client.fingerprint":
m["fields"].update({"fingerprint": entry["fingerprint"]})
# cowrie.direct-tcpip.data, cowrie.direct-tcpip.request
# cowrie.log.closed
# are not implemented
else:
# other events should be handled
log.msg(f"output_influx: event '{eventid}' not handled. Skipping..")
return
result = self.client.write_points([m])
if not result:
log.msg(
"output_influx: error when writing '{}' measurement"
"in the db.".format(eventid)
)
| 7,285 | 31.968326 | 87 | py |
cowrie | cowrie-master/src/cowrie/output/virustotal.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
"""
Send SSH logins to Virustotal
"""
from __future__ import annotations
import datetime
import json
import os
from typing import Any
from urllib.parse import urlencode, urlparse
from zope.interface import implementer
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet.ssl import ClientContextFactory
from twisted.python import log
from twisted.web import client, http_headers
from twisted.web.iweb import IBodyProducer
import cowrie.core.output
from cowrie.core.config import CowrieConfig
COWRIE_USER_AGENT = "Cowrie Honeypot"
VTAPI_URL = "https://www.virustotal.com/vtapi/v2/"
COMMENT = "First seen by #Cowrie SSH/telnet Honeypot http://github.com/cowrie/cowrie"
TIME_SINCE_FIRST_DOWNLOAD = datetime.timedelta(minutes=1)
class Output(cowrie.core.output.Output):
"""
virustotal output
"""
apiKey: str
debug: bool = False
commenttext: str
agent: Any
scan_url: bool
scan_file: bool
url_cache: dict[
str, datetime.datetime
    ] = {}  # url and last time successfully submitted
def start(self) -> None:
"""
Start output plugin
"""
self.apiKey = CowrieConfig.get("output_virustotal", "api_key")
self.debug = CowrieConfig.getboolean(
"output_virustotal", "debug", fallback=False
)
self.upload = CowrieConfig.getboolean(
"output_virustotal", "upload", fallback=True
)
self.comment = CowrieConfig.getboolean(
"output_virustotal", "comment", fallback=True
)
self.scan_file = CowrieConfig.getboolean(
"output_virustotal", "scan_file", fallback=True
)
self.scan_url = CowrieConfig.getboolean(
"output_virustotal", "scan_url", fallback=False
)
self.commenttext = CowrieConfig.get(
"output_virustotal", "commenttext", fallback=COMMENT
)
self.agent = client.Agent(reactor, WebClientContextFactory())
def stop(self) -> None:
"""
Stop output plugin
"""
def write(self, entry: dict[str, Any]) -> None:
if entry["eventid"] == "cowrie.session.file_download":
if self.scan_url and "url" in entry:
log.msg("Checking url scan report at VT")
self.scanurl(entry)
if self._is_new_shasum(entry["shasum"]) and self.scan_file:
log.msg("Checking file scan report at VT")
self.scanfile(entry)
elif entry["eventid"] == "cowrie.session.file_upload":
if self._is_new_shasum(entry["shasum"]) and self.scan_file:
log.msg("Checking file scan report at VT")
self.scanfile(entry)
def _is_new_shasum(self, shasum):
# Get the downloaded file's modification time
shasumfile = os.path.join(CowrieConfig.get("honeypot", "download_path"), shasum)
file_modification_time = datetime.datetime.fromtimestamp(
os.stat(shasumfile).st_mtime
)
# Assumptions:
        # 1. A file that is downloaded again is not written over the originally
        #    downloaded copy, so its modification time reflects the first download
        # 2. At this stage of the code, the file to be scanned by VT has already been downloaded
        #
        # Check:
        # If the file was first downloaded more than a set period of time (e.g. 1 min) ago,
        # it has apparently been checked against VT before and is not going to be checked again
if file_modification_time < datetime.datetime.now() - TIME_SINCE_FIRST_DOWNLOAD:
log.msg(f"File with shasum '{shasum}' was downloaded before")
return False
return True
def scanfile(self, entry):
"""
Check file scan report for a hash
Argument is full event so we can access full file later on
"""
vtUrl = f"{VTAPI_URL}file/report".encode()
headers = http_headers.Headers({"User-Agent": [COWRIE_USER_AGENT]})
fields = {"apikey": self.apiKey, "resource": entry["shasum"], "allinfo": 1}
body = StringProducer(urlencode(fields).encode("utf-8"))
d = self.agent.request(b"POST", vtUrl, headers, body)
def cbResponse(response):
"""
Main response callback, check HTTP response code
"""
if response.code == 200:
d = client.readBody(response)
d.addCallback(cbBody)
return d
else:
log.msg(f"VT Request failed: {response.code} {response.phrase}")
def cbBody(body):
"""
Received body
"""
return processResult(body)
def cbPartial(failure):
"""
Google HTTP Server does not set Content-Length. Twisted marks it as partial
"""
return processResult(failure.value.response)
def cbError(failure):
log.msg("VT: Error in scanfile")
failure.printTraceback()
def processResult(result):
"""
Extract the information we need from the body
"""
if self.debug:
log.msg(f"VT scanfile result: {result}")
result = result.decode("utf8")
j = json.loads(result)
log.msg("VT: {}".format(j["verbose_msg"]))
if j["response_code"] == 0:
log.msg(
eventid="cowrie.virustotal.scanfile",
format="VT: New file %(sha256)s",
session=entry["session"],
sha256=j["resource"],
is_new="true",
)
try:
b = os.path.basename(urlparse(entry["url"]).path)
if b == "":
fileName = entry["shasum"]
else:
fileName = b
except KeyError:
fileName = entry["shasum"]
if self.upload is True:
return self.postfile(entry["outfile"], fileName)
else:
return
elif j["response_code"] == 1:
log.msg("VT: response=1: this has been scanned before")
# Add detailed report to json log
scans_summary: dict[str, dict[str, str]] = {}
for feed, info in j["scans"].items():
feed_key = feed.lower()
scans_summary[feed_key] = {}
scans_summary[feed_key]["detected"] = str(info["detected"]).lower()
scans_summary[feed_key]["result"] = str(info["result"]).lower()
log.msg(
eventid="cowrie.virustotal.scanfile",
format="VT: Binary file with sha256 %(sha256)s was found malicious "
"by %(positives)s out of %(total)s feeds (scanned on %(scan_date)s)",
session=entry["session"],
positives=j["positives"],
total=j["total"],
scan_date=j["scan_date"],
sha256=j["resource"],
scans=scans_summary,
is_new="false",
)
log.msg("VT: permalink: {}".format(j["permalink"]))
elif j["response_code"] == -2:
log.msg("VT: response=-2: this has been queued for analysis already")
else:
log.msg("VT: unexpected response code: {}".format(j["response_code"]))
d.addCallback(cbResponse)
d.addErrback(cbError)
return d
def postfile(self, artifact, fileName):
"""
Send a file to VirusTotal
"""
vtUrl = f"{VTAPI_URL}file/scan".encode()
fields = {("apikey", self.apiKey)}
files = {("file", fileName, open(artifact, "rb"))}
if self.debug:
log.msg(f"submitting to VT: {files!r}")
contentType, body = encode_multipart_formdata(fields, files)
producer = StringProducer(body)
headers = http_headers.Headers(
{
"User-Agent": [COWRIE_USER_AGENT],
"Accept": ["*/*"],
"Content-Type": [contentType],
}
)
d = self.agent.request(b"POST", vtUrl, headers, producer)
def cbBody(body):
return processResult(body)
def cbPartial(failure):
"""
Google HTTP Server does not set Content-Length. Twisted marks it as partial
"""
return processResult(failure.value.response)
def cbResponse(response):
if response.code == 200:
d = client.readBody(response)
d.addCallback(cbBody)
d.addErrback(cbPartial)
return d
else:
log.msg(f"VT Request failed: {response.code} {response.phrase}")
def cbError(failure):
failure.printTraceback()
def processResult(result):
if self.debug:
log.msg(f"VT postfile result: {result}")
result = result.decode("utf8")
j = json.loads(result)
# This is always a new resource, since we did the scan before
# so always create the comment
log.msg("response=0: posting comment")
if self.comment is True:
return self.postcomment(j["resource"])
else:
return
d.addCallback(cbResponse)
d.addErrback(cbError)
return d
def scanurl(self, entry):
"""
Check url scan report for a hash
"""
if entry["url"] in self.url_cache:
log.msg(
"output_virustotal: url {} was already successfully submitted".format(
entry["url"]
)
)
return
vtUrl = f"{VTAPI_URL}url/report".encode()
headers = http_headers.Headers({"User-Agent": [COWRIE_USER_AGENT]})
fields = {
"apikey": self.apiKey,
"resource": entry["url"],
"scan": 1,
"allinfo": 1,
}
body = StringProducer(urlencode(fields).encode("utf-8"))
d = self.agent.request(b"POST", vtUrl, headers, body)
def cbResponse(response):
"""
Main response callback, checks HTTP response code
"""
if response.code == 200:
d = client.readBody(response)
d.addCallback(cbBody)
return d
else:
log.msg(f"VT Request failed: {response.code} {response.phrase}")
def cbBody(body):
"""
Received body
"""
return processResult(body)
def cbPartial(failure):
"""
Google HTTP Server does not set Content-Length. Twisted marks it as partial
"""
return processResult(failure.value.response)
def cbError(failure):
log.msg("cbError")
failure.printTraceback()
def processResult(result):
"""
Extract the information we need from the body
"""
if self.debug:
log.msg(f"VT scanurl result: {result}")
result = result.decode("utf8")
j = json.loads(result)
log.msg("VT: {}".format(j["verbose_msg"]))
# we got a status=200 assume it was successfully submitted
self.url_cache[entry["url"]] = datetime.datetime.now()
if j["response_code"] == 0:
log.msg(
eventid="cowrie.virustotal.scanurl",
format="VT: New URL %(url)s",
session=entry["session"],
url=entry["url"],
is_new="true",
)
return d
elif j["response_code"] == 1 and "scans" not in j:
log.msg(
"VT: response=1: this was submitted before but has not yet been scanned."
)
elif j["response_code"] == 1 and "scans" in j:
log.msg("VT: response=1: this has been scanned before")
# Add detailed report to json log
scans_summary: dict[str, dict[str, str]] = {}
for feed, info in j["scans"].items():
feed_key = feed.lower()
scans_summary[feed_key] = {}
scans_summary[feed_key]["detected"] = str(info["detected"]).lower()
scans_summary[feed_key]["result"] = str(info["result"]).lower()
log.msg(
eventid="cowrie.virustotal.scanurl",
format="VT: URL %(url)s was found malicious by "
"%(positives)s out of %(total)s feeds (scanned on %(scan_date)s)",
session=entry["session"],
positives=j["positives"],
total=j["total"],
scan_date=j["scan_date"],
url=j["url"],
scans=scans_summary,
is_new="false",
)
log.msg("VT: permalink: {}".format(j["permalink"]))
elif j["response_code"] == -2:
log.msg("VT: response=-2: this has been queued for analysis already")
log.msg("VT: permalink: {}".format(j["permalink"]))
else:
log.msg("VT: unexpected response code: {}".format(j["response_code"]))
d.addCallback(cbResponse)
d.addErrback(cbError)
return d
def postcomment(self, resource):
"""
Send a comment to VirusTotal with Twisted
"""
vtUrl = f"{VTAPI_URL}comments/put".encode()
parameters = {
"resource": resource,
"comment": self.commenttext,
"apikey": self.apiKey,
}
headers = http_headers.Headers({"User-Agent": [COWRIE_USER_AGENT]})
body = StringProducer(urlencode(parameters).encode("utf-8"))
d = self.agent.request(b"POST", vtUrl, headers, body)
def cbBody(body):
return processResult(body)
def cbPartial(failure):
"""
Google HTTP Server does not set Content-Length. Twisted marks it as partial
"""
return processResult(failure.value.response)
def cbResponse(response):
if response.code == 200:
d = client.readBody(response)
d.addCallback(cbBody)
d.addErrback(cbPartial)
return d
else:
log.msg(f"VT Request failed: {response.code} {response.phrase}")
def cbError(failure):
failure.printTraceback()
def processResult(result):
if self.debug:
log.msg(f"VT postcomment result: {result}")
result = result.decode("utf8")
j = json.loads(result)
return j["response_code"]
d.addCallback(cbResponse)
d.addErrback(cbError)
return d
class WebClientContextFactory(ClientContextFactory):
def getContext(self, hostname, port):
return ClientContextFactory.getContext(self)
@implementer(IBodyProducer)
class StringProducer:
def __init__(self, body):
self.body = body
self.length = len(body)
def startProducing(self, consumer):
consumer.write(self.body)
return defer.succeed(None)
def pauseProducing(self):
pass
def resumeProducing(self):
pass
def stopProducing(self):
pass
def encode_multipart_formdata(fields, files):
"""
fields is a sequence of (name, value) elements for regular form fields.
files is a sequence of (name, filename, value) elements for data to be uploaded as files
Return (content_type, body) ready for httplib.HTTPS instance
"""
BOUNDARY = b"----------ThIs_Is_tHe_bouNdaRY_$"
L = []
for (key, value) in fields:
L.append(b"--" + BOUNDARY)
L.append(b'Content-Disposition: form-data; name="%s"' % key.encode())
L.append(b"")
L.append(value.encode())
for (key, filename, value) in files:
L.append(b"--" + BOUNDARY)
L.append(
b'Content-Disposition: form-data; name="%s"; filename="%s"'
% (key.encode(), filename.encode())
)
L.append(b"Content-Type: application/octet-stream")
L.append(b"")
L.append(value.read())
L.append(b"--" + BOUNDARY + b"--")
L.append(b"")
body = b"\r\n".join(L)
content_type = b"multipart/form-data; boundary=%s" % BOUNDARY
return content_type, body
| 18,353 | 35.416667 | 117 | py |
cowrie | cowrie-master/src/cowrie/output/jsonlog.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import annotations
import json
import os
from twisted.python import log
import cowrie.core.output
import cowrie.python.logfile
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
jsonlog output
"""
def start(self):
self.epoch_timestamp = CowrieConfig.getboolean(
"output_jsonlog", "epoch_timestamp", fallback=False
)
fn = CowrieConfig.get("output_jsonlog", "logfile")
dirs = os.path.dirname(fn)
base = os.path.basename(fn)
self.outfile = cowrie.python.logfile.CowrieDailyLogFile(
base, dirs, defaultMode=0o664
)
def stop(self):
self.outfile.flush()
def write(self, logentry):
if self.epoch_timestamp:
logentry["epoch"] = int(logentry["time"] * 1000000 / 1000)
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_") or i == "time" or i == "system":
del logentry[i]
try:
json.dump(logentry, self.outfile, separators=(",", ":"))
self.outfile.write("\n")
self.outfile.flush()
except TypeError:
log.err("jsonlog: Can't serialize: '" + repr(logentry) + "'")
| 2,770 | 36.958904 | 75 | py |
cowrie | cowrie-master/src/cowrie/output/hpfeeds3.py | """
Output plugin for HPFeeds
"""
from __future__ import annotations
import json
import logging
from hpfeeds.twisted import ClientSessionService
from twisted.internet import endpoints, ssl
from twisted.internet import reactor
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
Output plugin for HPFeeds
"""
channel = "cowrie.sessions"
def start(self):
if CowrieConfig.has_option("output_hpfeeds3", "channel"):
self.channel = CowrieConfig.get("output_hpfeeds3", "channel")
if CowrieConfig.has_option("output_hpfeeds3", "endpoint"):
endpoint = CowrieConfig.get("output_hpfeeds3", "endpoint")
else:
server = CowrieConfig.get("output_hpfeeds3", "server")
port = CowrieConfig.getint("output_hpfeeds3", "port")
if CowrieConfig.has_option("output_hpfeeds3", "tlscert"):
with open(CowrieConfig.get("output_hpfeeds3", "tlscert")) as fp:
authority = ssl.Certificate.loadPEM(fp.read())
options = ssl.optionsForClientTLS(server, authority)
endpoint = endpoints.SSL4ClientEndpoint(reactor, server, port, options)
else:
endpoint = endpoints.HostnameEndpoint(reactor, server, port)
ident = CowrieConfig.get("output_hpfeeds3", "identifier")
secret = CowrieConfig.get("output_hpfeeds3", "secret")
self.meta = {}
self.client = ClientSessionService(endpoint, ident, secret)
self.client.startService()
def stop(self):
self.client.stopService()
def write(self, entry):
session = entry["session"]
if entry["eventid"] == "cowrie.session.connect":
self.meta[session] = {
"session": session,
"startTime": entry["timestamp"],
"endTime": "",
"peerIP": entry["src_ip"],
"peerPort": entry["src_port"],
"hostIP": entry["dst_ip"],
"hostPort": entry["dst_port"],
"loggedin": None,
"credentials": [],
"commands": [],
"unknownCommands": [],
"urls": [],
"version": None,
"ttylog": None,
"hashes": set(),
"protocol": entry["protocol"],
}
elif entry["eventid"] == "cowrie.login.success":
u, p = entry["username"], entry["password"]
self.meta[session]["loggedin"] = (u, p)
elif entry["eventid"] == "cowrie.login.failed":
u, p = entry["username"], entry["password"]
self.meta[session]["credentials"].append((u, p))
elif entry["eventid"] == "cowrie.command.input":
c = entry["input"]
self.meta[session]["commands"].append(c)
elif entry["eventid"] == "cowrie.command.failed":
uc = entry["input"]
self.meta[session]["unknownCommands"].append(uc)
elif entry["eventid"] == "cowrie.session.file_download":
if "url" in entry:
url = entry["url"]
self.meta[session]["urls"].append(url)
self.meta[session]["hashes"].add(entry["shasum"])
elif entry["eventid"] == "cowrie.session.file_upload":
self.meta[session]["hashes"].add(entry["shasum"])
elif entry["eventid"] == "cowrie.client.version":
v = entry["version"]
self.meta[session]["version"] = v
elif entry["eventid"] == "cowrie.log.closed":
# entry["ttylog"]
with open(entry["ttylog"], "rb") as ttylog:
self.meta[session]["ttylog"] = ttylog.read().hex()
elif entry["eventid"] == "cowrie.session.closed":
meta = self.meta.pop(session, None)
if meta:
log.msg("publishing metadata to hpfeeds", logLevel=logging.DEBUG)
meta["endTime"] = entry["timestamp"]
meta["hashes"] = list(meta["hashes"])
self.client.publish(self.channel, json.dumps(meta).encode("utf-8"))
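# Illustrative configuration, assuming a broker reachable over a plain TCP endpoint;
# start() above alternatively accepts an explicit `endpoint` string or a `tlscert`
# path for TLS. Values are placeholders.
#
#   [output_hpfeeds3]
#   server = hpfeeds.example.org
#   port = 10000
#   identifier = cowrie-ident
#   secret = cowrie-secret
#   channel = cowrie.sessions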
| 4,221 | 34.478992 | 87 | py |
cowrie | cowrie-master/src/cowrie/output/csirtg.py | from __future__ import annotations
import os
import sys
from datetime import datetime
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
token = CowrieConfig.get("output_csirtg", "token", fallback="a1b2c3d4")
if token == "a1b2c3d4":
log.msg("output_csirtg: token not found in configuration file")
sys.exit(1)
os.environ["CSIRTG_TOKEN"] = token
import csirtgsdk # noqa: E402
class Output(cowrie.core.output.Output):
"""
CSIRTG output
"""
def start(self):
"""
Start the output module.
        Note that csirtgsdk is imported at module level because it reads CSIRTG_TOKEN
        on import; Cowrie sets this environment variable before the import happens.
"""
self.user = CowrieConfig.get("output_csirtg", "username")
self.feed = CowrieConfig.get("output_csirtg", "feed")
self.debug = CowrieConfig.getboolean("output_csirtg", "debug", fallback=False)
self.description = CowrieConfig.get("output_csirtg", "description")
self.context = {}
# self.client = csirtgsdk.client.Client()
def stop(self):
pass
def write(self, e):
"""
Only pass on connection events
"""
if e["eventid"] == "cowrie.session.connect":
self.submitIp(e)
def submitIp(self, e):
peerIP = e["src_ip"]
ts = e["timestamp"]
system = e.get("system", None)
if system not in [
"cowrie.ssh.factory.CowrieSSHFactory",
"cowrie.telnet.transport.HoneyPotTelnetFactory",
]:
return
today = str(datetime.now().date())
if not self.context.get(today):
self.context = {}
self.context[today] = set()
key = ",".join([peerIP, system])
if key in self.context[today]:
return
self.context[today].add(key)
tags = "scanner,ssh"
port = 22
if e["system"] == "cowrie.telnet.transport.HoneyPotTelnetFactory":
tags = "scanner,telnet"
port = 23
i = {
"user": self.user,
"feed": self.feed,
"indicator": peerIP,
"portlist": port,
"protocol": "tcp",
"tags": tags,
"firsttime": ts,
"lasttime": ts,
"description": self.description,
}
if self.debug is True:
log.msg(f"output_csirtg: Submitting {i!r} to CSIRTG")
ind = csirtgsdk.indicator.Indicator(i).submit()
if self.debug is True:
log.msg(f"output_csirtg: Submitted {ind!r} to CSIRTG")
log.msg("output_csirtg: submitted to csirtg at {} ".format(ind["location"]))
| 2,714 | 26.15 | 86 | py |
cowrie | cowrie-master/src/cowrie/output/mongodb.py | from __future__ import annotations
import pymongo
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
mongodb output
"""
def insert_one(self, collection, event):
try:
object_id = collection.insert_one(event).inserted_id
return object_id
except Exception as e:
log.msg(f"mongo error - {e}")
def update_one(self, collection, session, doc):
try:
object_id = collection.update_one({"session": session}, {"$set": doc})
return object_id
except Exception as e:
log.msg(f"mongo error - {e}")
def start(self):
db_addr = CowrieConfig.get("output_mongodb", "connection_string")
db_name = CowrieConfig.get("output_mongodb", "database")
try:
self.mongo_client = pymongo.MongoClient(db_addr)
self.mongo_db = self.mongo_client[db_name]
# Define Collections.
self.col_sensors = self.mongo_db["sensors"]
self.col_sessions = self.mongo_db["sessions"]
self.col_auth = self.mongo_db["auth"]
self.col_input = self.mongo_db["input"]
self.col_downloads = self.mongo_db["downloads"]
self.col_input = self.mongo_db["input"]
self.col_clients = self.mongo_db["clients"]
self.col_ttylog = self.mongo_db["ttylog"]
self.col_keyfingerprints = self.mongo_db["keyfingerprints"]
self.col_event = self.mongo_db["event"]
self.col_ipforwards = self.mongo_db["ipforwards"]
self.col_ipforwardsdata = self.mongo_db["ipforwardsdata"]
except Exception as e:
log.msg(f"output_mongodb: Error: {e!s}")
def stop(self):
self.mongo_client.close()
def write(self, entry):
for i in list(entry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del entry[i]
eventid = entry["eventid"]
if eventid == "cowrie.session.connect":
# Check if sensor exists, else add it.
doc = self.col_sensors.find_one({"sensor": self.sensor})
if not doc:
self.insert_one(self.col_sensors, entry)
# Prep extra elements just to make django happy later on
entry["starttime"] = entry["timestamp"]
entry["endtime"] = None
entry["sshversion"] = None
entry["termsize"] = None
log.msg("Session Created")
self.insert_one(self.col_sessions, entry)
elif eventid in ["cowrie.login.success", "cowrie.login.failed"]:
self.insert_one(self.col_auth, entry)
elif eventid in ["cowrie.command.input", "cowrie.command.failed"]:
self.insert_one(self.col_input, entry)
elif eventid == "cowrie.session.file_download":
            # ToDo: add a config section and offer to store the file in the db - useful for central logging.
            # We will add an option to set the max size: if it's 16 MB or less we can store it as normal;
            # if over 16 MB, either fail or fall back to GridFS - both are simple enough.
self.insert_one(self.col_downloads, entry)
elif eventid == "cowrie.client.version":
doc = self.col_sessions.find_one({"session": entry["session"]})
if doc:
doc["sshversion"] = entry["version"]
self.update_one(self.col_sessions, entry["session"], doc)
else:
pass
elif eventid == "cowrie.client.size":
doc = self.col_sessions.find_one({"session": entry["session"]})
if doc:
doc["termsize"] = "{}x{}".format(entry["width"], entry["height"])
self.update_one(self.col_sessions, entry["session"], doc)
else:
pass
elif eventid == "cowrie.session.closed":
doc = self.col_sessions.find_one({"session": entry["session"]})
if doc:
doc["endtime"] = entry["timestamp"]
self.update_one(self.col_sessions, entry["session"], doc)
else:
pass
elif eventid == "cowrie.log.closed":
            # ToDo: compress to optimise the space, especially if you're sending to a remote db
with open(entry["ttylog"]) as ttylog:
entry["ttylogpath"] = entry["ttylog"]
entry["ttylog"] = ttylog.read().encode().hex()
self.insert_one(self.col_ttylog, entry)
elif eventid == "cowrie.client.fingerprint":
self.insert_one(self.col_keyfingerprints, entry)
elif eventid == "cowrie.direct-tcpip.request":
self.insert_one(self.col_ipforwards, entry)
elif eventid == "cowrie.direct-tcpip.data":
self.insert_one(self.col_ipforwardsdata, entry)
# Catch any other event types
else:
self.insert_one(self.col_event, entry)
| 5,057 | 37.318182 | 106 | py |
cowrie | cowrie-master/src/cowrie/output/discord.py | """
Simple Discord webhook logger
"""
from __future__ import annotations
import json
from io import BytesIO
from twisted.internet import reactor
from twisted.internet.ssl import ClientContextFactory
from twisted.web import client, http_headers
from twisted.web.client import FileBodyProducer
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
def start(self) -> None:
self.url = CowrieConfig.get("output_discord", "url").encode("utf8")
contextFactory = WebClientContextFactory()
self.agent = client.Agent(reactor, contextFactory)
def stop(self) -> None:
pass
def write(self, logentry):
webhook_message = "__New logentry__\n"
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
else:
webhook_message += f"{i}: `{logentry[i]}`\n"
self.postentry({"content": webhook_message})
def postentry(self, entry):
headers = http_headers.Headers(
{
b"Content-Type": [b"application/json"],
}
)
body = FileBodyProducer(BytesIO(json.dumps(entry).encode("utf8")))
self.agent.request(b"POST", self.url, headers, body)
class WebClientContextFactory(ClientContextFactory):
def getContext(self, hostname, port):
return ClientContextFactory.getContext(self)
| 1,486 | 26.537037 | 75 | py |
cowrie | cowrie-master/src/cowrie/output/graylog.py | """
Simple Graylog HTTP Graylog Extended Log Format (GELF) logger.
"""
from __future__ import annotations
import json
import time
from io import BytesIO
from twisted.internet import reactor
from twisted.internet.ssl import ClientContextFactory
from twisted.web import client, http_headers
from twisted.web.client import FileBodyProducer
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
def start(self) -> None:
self.url = CowrieConfig.get("output_graylog", "url").encode("utf8")
contextFactory = WebClientContextFactory()
self.agent = client.Agent(reactor, contextFactory)
def stop(self) -> None:
pass
def write(self, logentry):
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
gelf_message = {
"version": "1.1",
"host": logentry["sensor"],
"timestamp": time.time(),
"short_message": json.dumps(logentry),
"level": 1,
}
self.postentry(gelf_message)
def postentry(self, entry):
headers = http_headers.Headers(
{
b"Content-Type": [b"application/json"],
}
)
body = FileBodyProducer(BytesIO(json.dumps(entry).encode("utf8")))
self.agent.request(b"POST", self.url, headers, body)
class WebClientContextFactory(ClientContextFactory):
def getContext(self, hostname, port):
return ClientContextFactory.getContext(self)
| 1,607 | 26.254237 | 75 | py |
cowrie | cowrie-master/src/cowrie/output/abuseipdb.py | # MIT License
#
# Copyright (c) 2020 Benjamin Stephens <premier_contact@ben-stephens.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
"""
Cowrie plugin for reporting login attempts via the AbuseIPDB API.
"AbuseIPDB is a project dedicated to helping combat the spread of hackers,
spammers, and abusive activity on the internet." <https://www.abuseipdb.com/>
"""
from __future__ import annotations
__author__ = "Benjamin Stephens"
__version__ = "0.3b3"
import pickle
from collections import deque
from datetime import datetime
from json.decoder import JSONDecodeError
from pathlib import Path
from time import sleep, time
from treq import post
from twisted.internet import defer, threads
from twisted.internet import reactor
from twisted.python import log
from twisted.web import http
from cowrie.core import output
from cowrie.core.config import CowrieConfig
# How often we clean and dump and our lists/dict...
CLEAN_DUMP_SCHED = 600
# ...and the file we dump to.
DUMP_FILE: str = "aipdb.dump"
ABUSEIP_URL = "https://api.abuseipdb.com/api/v2/report"
# AbuseIPDB will just 429 us if we report an IP too often; currently 15 minutes
# (900 seconds); set lower limit here to protect againt bad user input.
REREPORT_MINIMUM = 900
class Output(output.Output):
def start(self):
self.tolerance_attempts: int = CowrieConfig.getint(
"output_abuseipdb", "tolerance_attempts", fallback=10
)
self.state_path = Path(CowrieConfig.get("output_abuseipdb", "dump_path"))
self.state_dump = self.state_path / DUMP_FILE
self.logbook = LogBook(self.tolerance_attempts, self.state_dump)
# Pass our instance of LogBook() to Reporter() so we don't end up
# working with different records.
self.reporter = Reporter(self.logbook, self.tolerance_attempts)
# We store the LogBook state any time a shutdown occurs. The rest of
# our start-up is just for loading and cleaning the previous state
try:
with open(self.state_dump, "rb") as f:
self.logbook.update(pickle.load(f))
# Check to see if we're still asleep after receiving a Retry-After
# header in a previous response
if self.logbook["sleeping"]:
t_wake: float = self.logbook["sleep_until"]
t_now: float = time()
if t_wake > t_now:
# If we're meant to be asleep, we'll set logbook.sleep to
# true and logbook.sleep_until to the time we can wake-up
self.logbook.sleeping = True
self.logbook.sleep_until = t_wake
# and we set an alarm so the reactor knows when he can drag
# us back out of bed
reactor.callLater(t_wake - t_now, self.logbook.wakeup)
del self.logbook["sleeping"]
del self.logbook["sleep_until"]
tolerated = self.logbook.pop("tolerated")
except (pickle.UnpicklingError, FileNotFoundError, KeyError):
if self.state_path.exists():
pass
else:
# If we don't already have an abuseipdb directory, let's make
# one with the necessary permissions now.
Path(self.state_path).mkdir(mode=0o700, parents=False, exist_ok=False)
# And we do a clean-up to make sure that we're not carrying any expired
# entries. The clean-up task ends by calling itself in a callLater,
# thus running every CLEAN_DUMP_SCHED seconds until the end of time.
self.logbook.cleanup_and_dump_state()
# If tolerance_attempts > the previous setting, we need to change the
# maximum length of the deque for any previously seen IP that we're
# loading, otherwise we'd potentially have IPs that may never trigger
# a report
try:
if tolerated != self.tolerance_attempts:
for k in self.logbook:
if self.logbook[k].__class__() == deque():
self.logbook[k] = deque(
[*self.logbook[k]], maxlen=self.tolerance_attempts
)
except UnboundLocalError:
pass
log.msg(
eventid="cowrie.abuseipdb.started",
format=f"AbuseIPDB Plugin version {__version__} started. Currently in beta.",
)
def stop(self):
self.logbook.cleanup_and_dump_state(mode=1)
def write(self, ev):
if self.logbook.sleeping:
return
if ev["eventid"].rsplit(".", 1)[0] == "cowrie.login":
# If tolerance_attempts was set to 1 or 0, we don't need to
# keep logs so our handling of the event is different than if > 1
if self.tolerance_attempts <= 1:
self.intolerant_observer(ev["src_ip"], time(), ev["username"])
else:
self.tolerant_observer(ev["src_ip"], time())
def intolerant_observer(self, ip, t, uname):
# Checks if already reported; if yes, checks if we can rereport yet.
# The entry for a reported IP is a tuple (None, time_reported). If IP
# is not already in logbook, reports it immediately
if ip in self.logbook:
if self.logbook.can_rereport(ip, t):
self.reporter.report_ip_single(ip, t, uname)
else:
return
else:
self.reporter.report_ip_single(ip, t, uname)
def tolerant_observer(self, ip, t):
        # Appends the time an IP was seen to its list in the logbook. Once the
# length of the list equals tolerance_attempts, the IP is reported.
if ip in self.logbook:
try:
if self.logbook[ip][0]:
# Evaluates true if IP not already reported. If reported,
# logbook entry is of the form (None, time_reported).
self.logbook[ip].append(t)
self.logbook.clean_expired_timestamps(ip, t)
if len(self.logbook[ip]) >= self.tolerance_attempts:
self.reporter.report_ip_multiple(ip)
elif self.logbook.can_rereport(ip, t):
# Check if reported IP is ready for re-reporting
self.logbook[ip] = deque([t], maxlen=self.tolerance_attempts)
else:
return
except IndexError:
# If IP address was in logbook but had no entries then we're
# fine to re-report.
self.logbook[ip].append(t)
else:
self.logbook[ip] = deque([t], maxlen=self.tolerance_attempts)
class LogBook(dict):
"""
Dictionary class with methods for cleaning and dumping its state.
This class should be treated as global state. For the moment this is
achieved simply by passing the instance created by Output() directly to
Reporter(). Sharing is caring.
"""
def __init__(self, tolerance_attempts, state_dump):
self.sleeping = False
self.sleep_until: float = 0.0
self.tolerance_attempts = tolerance_attempts
self.tolerance_window: int = 60 * CowrieConfig.getint(
"output_abuseipdb", "tolerance_window", fallback=120
)
self.rereport_after: float = 3600 * CowrieConfig.getfloat(
"output_abuseipdb", "rereport_after", fallback=24
)
if self.rereport_after < REREPORT_MINIMUM:
self.rereport_after = REREPORT_MINIMUM
self.state_dump = state_dump
# To write our dump to disk we have a method we call in a thread so we
# don't block if we get slow io. This is a cheap hack to get a lock on
# the file. See self.write_dump_file()
self._writing = False
super().__init__()
def wakeup(self):
# This is the method we pass in a callLater() before we go to sleep.
self.sleeping = False
self.sleep_until = 0
self.recall = reactor.callLater(CLEAN_DUMP_SCHED, self.cleanup_and_dump_state)
log.msg(
eventid="cowrie.abuseipdb.wakeup",
format="AbuseIPDB plugin resuming activity after receiving "
"Retry-After header in previous response.",
)
def clean_expired_timestamps(self, ip_key, current_time):
        # Performs popleft() if the leftmost timestamp has expired. Continues doing
        # so until either: 1) a timestamp within our reporting window is
        # reached, or 2) the list is empty.
while self[ip_key]:
if not self[ip_key][0]:
break
elif self[ip_key][0] < current_time - self.tolerance_window:
self[ip_key].popleft()
else:
break
def find_and_delete_empty_entries(self):
# Search and destroy method. Iterates over dict, appends k to delete_me
# where v is an empty list.
delete_me = []
for k in self:
if not self[k]:
delete_me.append(k)
self.delete_entries(delete_me)
def delete_entries(self, delete_me):
for i in delete_me:
del self[i]
def can_rereport(self, ip_key, current_time):
# Checks if an IP in the logbook that has already been reported is
# ready to be re-reported again.
try:
if current_time > self[ip_key][1] + self.rereport_after:
return True
elif self[ip_key][0] and self.tolerance_attempts <= 1:
# If we were previously running with a tolerance_attempts > 1
                # and have been restarted with tolerance_attempts <= 1,
# we could still be carrying some logs which would evaluate as
# false in our first test. Reported IPs will still evaluate
# false here.
return True
else:
return False
except IndexError:
return True
def cleanup_and_dump_state(self, mode=0):
# Runs a full clean-up of logbook. Re-calls itself in CLEAN_DUMP_SCHED
# seconds. MODES: 0) Normal looping task, and; 1) Sleep/Stop mode;
# cancels any scheduled callLater() and doesn't recall itself.
if mode == 1:
try:
self.recall.cancel()
except AttributeError:
pass
if self.sleeping:
t = self.sleep_until
else:
t = time()
delete_me = []
for k in self:
if self.can_rereport(k, t):
delete_me.append(k)
self.clean_expired_timestamps(k, t)
self.delete_entries(delete_me)
self.find_and_delete_empty_entries()
self.dump_state()
if mode == 0 and not self.sleeping:
self.recall = reactor.callLater(
CLEAN_DUMP_SCHED, self.cleanup_and_dump_state
)
def dump_state(self):
dump = {
"sleeping": self.sleeping,
"sleep_until": self.sleep_until,
# Store current self_tolerance for comparison on next start
"tolerated": self.tolerance_attempts,
}
for k, v in self.items():
dump[k] = v
reactor.callInThread(self.write_dump_file, dump)
def write_dump_file(self, dump):
# Check self._writing; waits for release; timeout after 10 seconds.
i = 0
while self._writing:
sleep(1)
i += 1
if i >= 10:
return
# Acquire 'lock'
self._writing = True
with open(self.state_dump, "wb") as f:
pickle.dump(dump, f, protocol=pickle.HIGHEST_PROTOCOL)
# Release 'lock'
self._writing = False
class Reporter:
"""
    HTTP client and methods for preparing report parameters.
"""
def __init__(self, logbook, attempts):
self.logbook = logbook
self.attempts = attempts
self.headers = {
"User-Agent": "Cowrie Honeypot AbuseIPDB plugin",
"Accept": "application/json",
"Key": CowrieConfig.get("output_abuseipdb", "api_key"),
}
def report_ip_single(self, ip, t, uname):
self.logbook[ip] = (None, t)
t = self.epoch_to_string_utc(t)
params = {
"ip": ip,
"categories": "18,22",
"comment": "Cowrie Honeypot: Unauthorised SSH/Telnet login attempt "
'with user "{}" at {}'.format(uname, t),
}
self.http_request(params)
def report_ip_multiple(self, ip):
t_last = self.logbook[ip].pop()
t_first = self.epoch_to_string_utc(self.logbook[ip].popleft())
self.logbook[ip] = (None, t_last)
t_last = self.epoch_to_string_utc(t_last)
params = {
"ip": ip,
"categories": "18,22",
"comment": "Cowrie Honeypot: {} unauthorised SSH/Telnet login attempts "
"between {} and {}".format(self.attempts, t_first, t_last),
}
self.http_request(params)
@staticmethod
def epoch_to_string_utc(t):
t_utc = datetime.utcfromtimestamp(t)
return t_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
@staticmethod
def log_response_failed(ip, response, reason):
log.msg(
eventid="cowrie.abuseipdb.reportfail",
format="AbuseIPDB plugin failed to report IP %(IP)s. Received HTTP "
"status code %(response)s in response. Reason: %(reason)s.",
IP=ip,
response=response,
reason=reason,
)
@defer.inlineCallbacks
def http_request(self, params):
try:
response = yield post(
url=ABUSEIP_URL,
headers=self.headers,
params=params,
)
except Exception as e:
log.msg(
eventid="cowrie.abuseipdb.reportfail",
format="AbuseIPDB plugin failed to report IP %(IP)s. "
"Exception raised: %(exception)s.",
IP=params["ip"],
exception=repr(e),
)
return
if response.code != http.OK:
if response.code == 429:
return self.rate_limit_handler(params, response)
try:
reason = http.RESPONSES[response.code].decode("utf-8")
except Exception:
reason = "Unable to determine."
self.log_response_failed(params["ip"], response.code, reason)
return
j = yield response.json()
log.msg(
eventid="cowrie.abuseipdb.reportedip",
format="AbuseIPDB plugin successfully reported %(IP)s. Current "
"AbuseIPDB confidence score for this IP is %(confidence)s",
IP=params["ip"],
confidence=j["data"]["abuseConfidenceScore"],
)
@defer.inlineCallbacks
def rate_limit_handler(self, params, response):
try:
j = yield response.json()
reason = j["errors"][0]["detail"]
except (KeyError, JSONDecodeError):
reason = "No other information provided or unexpected response"
self.log_response_failed(params["ip"], response.code, reason)
# AbuseIPDB will respond with a 429 and a Retry-After in its response
# headers if we've exceeded our limits for the day. Here we test for
# that header and, if it exists, put ourselves to sleep.
retry_after = yield response.headers.hasHeader("Retry-After")
if retry_after:
retry = yield response.headers.getRawHeaders("Retry-After")
retry = int(retry.pop())
if retry > 86340:
yield threads.deferToThread(self.sleeper_thread)
log.msg(
eventid="cowrie.abuseipdb.ratelimited",
format="AbuseIPDB plugin received Retry-After header > 86340 "
"seconds in previous response. Possible delayed quota "
"reset on AbuseIPDB servers; retrying request now.",
)
return self.http_request(params)
self.logbook.sleeping = True
self.logbook.sleep_until = time() + retry
reactor.callLater(retry, self.logbook.wakeup)
# It's not serious if we don't, but it's best to call the clean-up
# after logbook.sleeping has been set to True. The clean-up method
# checks for this flag and will use the wake-up time rather than
# the current time when sleep is set. mode=1 ensures we'll cancel
# any already scheduled calls to clean-up and don't schedule
# another one until the wake-up method calls it again.
self.logbook.cleanup_and_dump_state(mode=1)
self.epoch_to_string_utc(self.logbook.sleep_until)
log.msg(
eventid="cowrie.abuseipdb.ratelimited",
format="AbuseIPDB plugin received Retry-After header in "
"response. Reporting activity will resume in "
"%(retry_after)s seconds at %(wake_at)s",
retry_after=retry,
wake_at=self.epoch_to_string_utc(self.logbook.sleep_until),
)
def sleeper_thread(self):
# Cheap retry wait hack. Call in thread so as not to block.
sleep(10)
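# Illustrative configuration; the option names correspond to the reads in start(),
# LogBook.__init__() and Reporter.__init__() above. Values are placeholders:
# tolerance_window is in minutes, rereport_after in hours.
#
#   [output_abuseipdb]
#   api_key = <AbuseIPDB API key>
#   dump_path = var/lib/cowrie/abuseipdb
#   tolerance_attempts = 10
#   tolerance_window = 120
#   rereport_after = 24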
| 18,525 | 36.275654 | 89 | py |
cowrie | cowrie-master/src/cowrie/output/datadog.py | """
Simple Datadog HTTP logger.
"""
from __future__ import annotations
import json
import platform
from io import BytesIO
from twisted.internet import reactor
from twisted.internet.ssl import ClientContextFactory
from twisted.python import log
from twisted.web import client, http_headers
from twisted.web.client import FileBodyProducer
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
def start(self) -> None:
self.url = CowrieConfig.get("output_datadog", "url").encode("utf8")
self.api_key = CowrieConfig.get(
"output_datadog", "api_key", fallback=""
).encode("utf8")
if len(self.api_key) == 0:
log.msg("Datadog output module: API key is not defined.")
self.ddsource = CowrieConfig.get(
"output_datadog", "ddsource", fallback="cowrie"
)
self.ddtags = CowrieConfig.get("output_datadog", "ddtags", fallback="env:dev")
self.service = CowrieConfig.get(
"output_datadog", "service", fallback="honeypot"
)
contextFactory = WebClientContextFactory()
self.agent = client.Agent(reactor, contextFactory)
def stop(self) -> None:
pass
def write(self, logentry):
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
message = [
{
"ddsource": self.ddsource,
"ddtags": self.ddtags,
"hostname": platform.node(),
"message": json.dumps(logentry),
"service": self.service,
}
]
self.postentry(message)
def postentry(self, entry):
base_headers = {
b"Accept": [b"application/json"],
b"Content-Type": [b"application/json"],
b"DD-API-KEY": [self.api_key],
}
headers = http_headers.Headers(base_headers)
body = FileBodyProducer(BytesIO(json.dumps(entry).encode("utf8")))
self.agent.request(b"POST", self.url, headers, body)
class WebClientContextFactory(ClientContextFactory):
def getContext(self, hostname, port):
return ClientContextFactory.getContext(self)
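# Illustrative configuration; the option names are those read in start() above.
# The url shown is only an example - use the log intake endpoint for your
# Datadog region - and the api_key is a placeholder.
#
#   [output_datadog]
#   url = https://http-intake.logs.datadoghq.com/api/v2/logs
#   api_key = <Datadog API key>
#   ddsource = cowrie
#   ddtags = env:dev
#   service = honeypot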
| 2,286 | 30.763889 | 86 | py |
cowrie | cowrie-master/src/cowrie/output/crashreporter.py | """
Cowrie Crashreport
This output plugin is not like the others.
It has its own emit() function and does not use cowrie eventid's
to avoid circular calls
"""
from __future__ import annotations
import json
import treq
from twisted.internet import defer
from twisted.logger._levels import LogLevel
from twisted.python import log
import cowrie.core.output
from cowrie._version import __version__
from cowrie.core.config import CowrieConfig
COWRIE_USER_AGENT = f"Cowrie Honeypot {__version__}".encode("ascii")
COWRIE_URL = "https://api.cowrie.org/v1/crash"
class Output(cowrie.core.output.Output):
"""
Cowrie Crashreporter output
"""
def start(self):
"""
Start output plugin
"""
self.apiKey = CowrieConfig.get("output_cowrie", "api_key", fallback=None)
self.debug = CowrieConfig.getboolean("output_cowrie", "debug", fallback=False)
def emit(self, event):
"""
Note we override emit() here, unlike other plugins.
"""
if event.get("log_level") == LogLevel.critical:
self.crashreport(event)
def stop(self):
"""
Stop output plugin
"""
pass
def write(self, entry):
"""
events are done in emit() not in write()
"""
pass
@defer.inlineCallbacks
def crashreport(self, entry):
"""
Crash report
"""
try:
r = yield treq.post(
COWRIE_URL,
json.dumps(
{"log_text": entry.get("log_text"), "system": entry.get("system")}
).encode("ascii"),
headers={
b"Content-Type": [b"application/json"],
b"User-Agent": [COWRIE_USER_AGENT],
},
)
content = yield r.text()
if self.debug:
log.msg("crashreport: " + content)
except Exception as e:
log.msg("crashreporter failed" + repr(e))
| 2,005 | 24.392405 | 86 | py |
cowrie | cowrie-master/src/cowrie/output/elasticsearch.py | # Simple elasticsearch logger
from __future__ import annotations
from typing import Any
from elasticsearch import Elasticsearch, NotFoundError
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
elasticsearch output
"""
index: str
pipeline: str
es: Any
def start(self):
host = CowrieConfig.get("output_elasticsearch", "host")
port = CowrieConfig.get("output_elasticsearch", "port")
self.index = CowrieConfig.get("output_elasticsearch", "index")
self.type = CowrieConfig.get("output_elasticsearch", "type")
self.pipeline = CowrieConfig.get("output_elasticsearch", "pipeline")
# new options (creds + https)
username = CowrieConfig.get("output_elasticsearch", "username", fallback=None)
password = CowrieConfig.get("output_elasticsearch", "password", fallback=None)
use_ssl = CowrieConfig.getboolean("output_elasticsearch", "ssl", fallback=False)
ca_certs = CowrieConfig.get("output_elasticsearch", "ca_certs", fallback=None)
verify_certs = CowrieConfig.getboolean(
"output_elasticsearch", "verify_certs", fallback=True
)
options: dict[str, Any] = {}
# connect
if (username is not None) and (password is not None):
options["http_auth"] = (username, password)
if use_ssl:
options["scheme"] = "https"
options["use_ssl"] = use_ssl
options["ssl_show_warn"] = False
options["verify_certs"] = verify_certs
if verify_certs:
options["ca_certs"] = ca_certs
# connect
self.es = Elasticsearch(f"{host}:{port}", **options)
# self.es = Elasticsearch('{0}:{1}'.format(self.host, self.port))
self.check_index()
# ensure geoip pipeline is well set up
if self.pipeline == "geoip":
# create a new feature if it does not exist yet
self.check_geoip_mapping()
# ensure the geoip pipeline is setup
self.check_geoip_pipeline()
def check_index(self):
"""
        This function checks whether the index exists.
"""
if not self.es.indices.exists(index=self.index):
# create index
self.es.indices.create(index=self.index)
def check_geoip_mapping(self):
"""
This function ensures that the right mapping is set up
to convert source ip (src_ip) into geo data.
"""
if self.es.indices.exists(index=self.index):
# Add mapping (to add geo field -> for geoip)
# The new feature is named 'geo'.
# You can put mappings several times, if it exists the
# PUT requests will be ignored.
self.es.indices.put_mapping(
index=self.index,
body={
"properties": {
"geo": {"properties": {"location": {"type": "geo_point"}}}
}
},
)
def check_geoip_pipeline(self):
"""
        This function ensures that a geoip pipeline is set up
        to map IP addresses to geo locations.
"""
try:
# check if the geoip pipeline exists. An error
# is raised if the pipeline does not exist
self.es.ingest.get_pipeline(id=self.pipeline)
except NotFoundError:
# geoip pipeline
body = {
"description": "Add geoip info",
"processors": [
{
"geoip": {
"field": "src_ip", # input field of the pipeline (source address)
"target_field": "geo", # output field of the pipeline (geo data)
"database_file": "GeoLite2-City.mmdb",
}
}
],
}
self.es.ingest.put_pipeline(id=self.pipeline, body=body)
def stop(self):
pass
def write(self, logentry):
for i in list(logentry.keys()):
# remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
self.es.index(
index=self.index, doc_type=self.type, body=logentry, pipeline=self.pipeline
)
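# Illustrative configuration; option names match the reads in start() above.
# username/password and the ssl-related options are only needed for a secured
# cluster. Values are placeholders.
#
#   [output_elasticsearch]
#   host = localhost
#   port = 9200
#   index = cowrie
#   type = _doc
#   pipeline = geoip
#   username = elastic
#   password = <password>
#   ssl = true
#   ca_certs = /path/to/ca.crt
#   verify_certs = true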
| 4,393 | 33.873016 | 94 | py |
cowrie | cowrie-master/src/cowrie/output/localsyslog.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import annotations
import syslog
import twisted.python.syslog
import cowrie.core.cef
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
localsyslog output
"""
def start(self):
self.format = CowrieConfig.get("output_localsyslog", "format")
facilityString = CowrieConfig.get("output_localsyslog", "facility")
self.facility = vars(syslog)["LOG_" + facilityString]
self.syslog = twisted.python.syslog.SyslogObserver(
prefix="cowrie", facility=self.facility
)
def stop(self):
pass
def write(self, logentry):
if "isError" not in logentry:
logentry["isError"] = False
if self.format == "cef":
self.syslog.emit(
{
"message": [cowrie.core.cef.formatCef(logentry)],
"isError": False,
"system": "cowrie",
}
)
else:
# message appears with additional spaces if message key is defined
logentry["message"] = [logentry["message"]]
self.syslog.emit(logentry)
| 2,683 | 36.277778 | 78 | py |
cowrie | cowrie-master/src/cowrie/output/threatjammer.py | # Copyright 2022 by GOODDATA LABS SL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Cowrie plugin for reporting login attempts via the ThreatJammer.com Report API.
"ThreatJammer.com is a tool to track and detect attacks" <https://threatjammer.com>
"""
__author__ = "Diego Parrilla Santamaria"
__version__ = "0.1.0"
import datetime
from typing import Optional
from collections.abc import Generator
from treq import post
from twisted.internet import defer
from twisted.python import log
from twisted.web import http
from cowrie.core import output
from cowrie.core.config import CowrieConfig
# Buffer flush frequency (in minutes)
BUFFER_FLUSH_FREQUENCY: int = 1
# Buffer flush max size
BUFFER_FLUSH_MAX_SIZE: int = 1000
# API URL
THREATJAMMER_REPORT_URL: str = "https://dublin.report.threatjammer.com/v1/ip"
# Default Time To Live (TTL) in the ThreatJammer.com private blocklist. In minutes.
THREATJAMMER_DEFAULT_TTL: int = 86400
# Default category to store the ip address.
THREATJAMMER_DEFAULT_CATEGORY: str = "ABUSE"
# Track the login event
THREATJAMMER_DEFAULT_TRACK_LOGIN: bool = True
# Track the session event
THREATJAMMER_DEFAULT_TRACK_SESSION: bool = False
# Default tags to store the ip address.
THREATJAMMER_DEFAULT_TAGS: str = "COWRIE"
class HTTPClient:
"""
    HTTP client to report the IP address set
"""
def __init__(self, api_url: str, bearer_token: str):
self.headers = {
"User-Agent": "Cowrie Honeypot ThreatJammer.com output plugin",
"Accept": "application/json",
"Content-Type": "application/json",
"Authorization": f"Bearer {bearer_token}",
}
self.api_url = api_url
def report(
self,
ip_set: set[str],
category: str,
ttl: int = 0,
tags: Optional[list[str]] = None,
) -> None:
payload: dict = {
"addresses": list(ip_set),
"type": category,
"ttl": ttl,
"tags": tags,
}
self._post(payload)
@defer.inlineCallbacks
def _post(self, payload: dict) -> Generator:
try:
response = yield post(
url=self.api_url,
headers=self.headers,
json=payload,
)
except Exception as e:
log.msg(
eventid="cowrie.threatjammer.reportfail",
format="ThreatJammer.com output plugin failed when reporting the payload %(payload)s. "
"Exception raised: %(exception)s.",
payload=str(payload),
exception=repr(e),
)
return
if response.code != http.ACCEPTED:
reason = yield response.text()
log.msg(
eventid="cowrie.threatjammer.reportfail",
format="ThreatJammer.com output plugin failed to report the payload %(payload)s. Returned the\
HTTP status code %(response)s. Reason: %(reason)s.",
payload=str(payload),
response=response.code,
reason=reason,
)
else:
log.msg(
eventid="cowrie.threatjammer.reportedipset",
format="ThreatJammer.com output plugin successfully reported %(payload)s.",
payload=str(payload),
)
return
class Output(output.Output):
def start(self):
self.api_url = CowrieConfig.get(
"output_threatjammer",
"api_url",
fallback=THREATJAMMER_REPORT_URL,
)
self.default_ttl = CowrieConfig.getint(
"output_threatjammer", "ttl", fallback=THREATJAMMER_DEFAULT_TTL
)
self.default_category = CowrieConfig.get(
"output_threatjammer",
"category",
fallback=THREATJAMMER_DEFAULT_CATEGORY,
)
self.track_login = CowrieConfig.getboolean(
"output_threatjammer",
"track_login",
fallback=THREATJAMMER_DEFAULT_TRACK_LOGIN,
)
self.track_session = CowrieConfig.getboolean(
"output_threatjammer",
"track_session",
fallback=THREATJAMMER_DEFAULT_TRACK_SESSION,
)
self.bearer_token = CowrieConfig.get("output_threatjammer", "bearer_token")
self.tags = CowrieConfig.get("output_threatjammer", "tags").split(",")
self.last_report: int = -1
self.report_bucket: int = BUFFER_FLUSH_MAX_SIZE
self.ip_set: set[str] = set()
self.track_events = []
if self.track_login:
self.track_events.append("cowrie.login")
if self.track_session:
self.track_events.append("cowrie.session")
self.http_client = HTTPClient(self.api_url, self.bearer_token)
log.msg(
eventid="cowrie.threatjammer.reporterinitialized",
format="ThreatJammer.com output plugin successfully initialized.\
Category=%(category)s. TTL=%(ttl)s. Session Tracking=%(session_tracking)s. Login Tracking=%(login_tracking)s",
category=self.default_category,
ttl=self.default_ttl,
session_tracking=self.track_session,
login_tracking=self.track_login,
)
def stop(self):
log.msg(
eventid="cowrie.threatjammer.reporterterminated",
format="ThreatJammer.com output plugin successfully terminated. Bye!",
)
def write(self, ev):
if ev["eventid"].rsplit(".", 1)[0] in self.track_events:
source_ip: str = ev["src_ip"]
self.ip_set.add(source_ip)
if self.last_report == -1:
                # Not yet executed in this cycle; store the timestamp of the first element.
self.last_report = int(datetime.datetime.utcnow().timestamp())
self.report_bucket -= 1
if (
self.report_bucket == 0
or (int(datetime.datetime.utcnow().timestamp()) - self.last_report)
> BUFFER_FLUSH_FREQUENCY * 60
):
                # Flush the ip_set once BUFFER_FLUSH_MAX_SIZE IPs have been collected or more
                # than BUFFER_FLUSH_FREQUENCY minutes have passed since the last flush.
self.http_client.report(
ip_set=self.ip_set,
category=self.default_category,
ttl=self.default_ttl,
tags=self.tags,
)
self.ip_set = set()
self.report_bucket = BUFFER_FLUSH_MAX_SIZE
self.last_report = -1
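# Illustrative configuration; bearer_token and tags are the only options without
# a fallback in start() above, the rest default to the module-level constants.
# Values are placeholders.
#
#   [output_threatjammer]
#   bearer_token = <ThreatJammer Report API token>
#   api_url = https://dublin.report.threatjammer.com/v1/ip
#   ttl = 86400
#   category = ABUSE
#   track_login = true
#   track_session = false
#   tags = COWRIE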
| 7,010 | 32.545455 | 111 | py |
cowrie | cowrie-master/src/cowrie/output/dshield.py | """
Send SSH logins to SANS DShield.
See https://isc.sans.edu/ssh.html
"""
from __future__ import annotations
import base64
import hashlib
import hmac
import re
import time
import dateutil.parser
import requests
from twisted.internet import reactor
from twisted.internet import threads
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
dshield output
"""
debug: bool = False
userid: str
batch_size: int
batch: list
def start(self):
self.auth_key = CowrieConfig.get("output_dshield", "auth_key")
self.userid = CowrieConfig.get("output_dshield", "userid")
self.batch_size = CowrieConfig.getint("output_dshield", "batch_size")
self.debug = CowrieConfig.getboolean("output_dshield", "debug", fallback=False)
self.batch = [] # This is used to store login attempts in batches
def stop(self):
pass
def write(self, entry):
if (
entry["eventid"] == "cowrie.login.success"
or entry["eventid"] == "cowrie.login.failed"
):
date = dateutil.parser.parse(entry["timestamp"])
self.batch.append(
{
"date": str(date.date()),
"time": date.time().strftime("%H:%M:%S"),
"timezone": time.strftime("%z"),
"source_ip": entry["src_ip"],
"user": entry["username"],
"password": entry["password"],
}
)
if len(self.batch) >= self.batch_size:
batch_to_send = self.batch
self.submit_entries(batch_to_send)
self.batch = []
def transmission_error(self, batch):
self.batch.extend(batch)
if len(self.batch) > self.batch_size * 2:
self.batch = self.batch[-self.batch_size :]
def submit_entries(self, batch):
"""
Large parts of this method are adapted from kippo-pyshield by jkakavas
Many thanks to their efforts. https://github.com/jkakavas/kippo-pyshield
"""
# The nonce is predefined as explained in the original script :
# trying to avoid sending the authentication key in the "clear" but
# not wanting to deal with a full digest like exchange. Using a
# fixed nonce to mix up the limited userid.
_nonceb64 = "ElWO1arph+Jifqme6eXD8Uj+QTAmijAWxX1msbJzXDM="
log_output = ""
for attempt in self.batch:
log_output += "{}\t{}\t{}\t{}\t{}\t{}\n".format(
attempt["date"],
attempt["time"],
attempt["timezone"],
attempt["source_ip"],
attempt["user"],
attempt["password"],
)
nonce = base64.b64decode(_nonceb64)
digest = base64.b64encode(
hmac.new(
nonce + self.userid.encode("ascii"),
base64.b64decode(self.auth_key),
hashlib.sha256,
).digest()
)
auth_header = "credentials={} nonce={} userid={}".format(
digest.decode("ascii"), _nonceb64, self.userid
)
headers = {"X-ISC-Authorization": auth_header, "Content-Type": "text/plain"}
if self.debug:
log.msg(f"dshield: posting: {headers!r}")
log.msg(f"dshield: posting: {log_output}")
req = threads.deferToThread(
requests.request,
method="PUT",
url="https://secure.dshield.org/api/file/sshlog",
headers=headers,
timeout=10,
data=log_output,
)
def check_response(resp):
failed = False
response = resp.content.decode("utf8")
if self.debug:
log.msg(f"dshield: status code {resp.status_code}")
log.msg(f"dshield: response {resp.content}")
if resp.ok:
sha1_regex = re.compile(r"<sha1checksum>([^<]+)<\/sha1checksum>")
sha1_match = sha1_regex.search(response)
sha1_local = hashlib.sha1()
sha1_local.update(log_output.encode("utf8"))
if sha1_match is None:
log.msg(
"dshield: ERROR: Could not find sha1checksum in response: {}".format(
repr(response)
)
)
failed = True
elif sha1_match.group(1) != sha1_local.hexdigest():
log.msg(
"dshield: ERROR: SHA1 Mismatch {} {} .".format(
sha1_match.group(1), sha1_local.hexdigest()
)
)
failed = True
md5_regex = re.compile(r"<md5checksum>([^<]+)<\/md5checksum>")
md5_match = md5_regex.search(response)
md5_local = hashlib.md5()
md5_local.update(log_output.encode("utf8"))
if md5_match is None:
log.msg("dshield: ERROR: Could not find md5checksum in response")
failed = True
elif md5_match.group(1) != md5_local.hexdigest():
log.msg(
"dshield: ERROR: MD5 Mismatch {} {} .".format(
md5_match.group(1), md5_local.hexdigest()
)
)
failed = True
log.msg(
f"dshield: SUCCESS: Sent {log_output} bytes worth of data to secure.dshield.org"
)
else:
log.msg(f"dshield ERROR: error {resp.status_code}.")
log.msg(f"dshield response was {response}")
failed = True
if failed:
# Something went wrong, we need to add them to batch.
reactor.callFromThread(self.transmission_error, batch)
req.addCallback(check_response)
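# Illustrative configuration; userid and auth_key come from the dshield.org
# account settings, and batch_size controls how many login attempts are buffered
# before an upload. Values are placeholders.
#
#   [output_dshield]
#   userid = <DShield userid>
#   auth_key = <DShield authentication key>
#   batch_size = 100
#   debug = false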
| 6,123 | 33.994286 | 100 | py |
cowrie | cowrie-master/src/cowrie/output/splunk.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
"""
Splunk HTTP Event Collector (HEC) Connector.
Not ready for production use.
The JSON log file is still the recommended way to go.
"""
from __future__ import annotations
import json
from io import BytesIO
from typing import Any
from twisted.internet import reactor
from twisted.internet.ssl import ClientContextFactory
from twisted.python import log
from twisted.web import client, http_headers
from twisted.web.client import FileBodyProducer
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
Splunk HEC output
"""
token: str
agent: Any
url: bytes
def start(self) -> None:
self.token = CowrieConfig.get("output_splunk", "token")
self.url = CowrieConfig.get("output_splunk", "url").encode("utf8")
self.index = CowrieConfig.get("output_splunk", "index", fallback=None)
self.source = CowrieConfig.get("output_splunk", "source", fallback=None)
self.sourcetype = CowrieConfig.get("output_splunk", "sourcetype", fallback=None)
self.host = CowrieConfig.get("output_splunk", "host", fallback=None)
contextFactory = WebClientContextFactory()
# contextFactory.method = TLSv1_METHOD
self.agent = client.Agent(reactor, contextFactory)
def stop(self) -> None:
pass
def write(self, logentry):
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
splunkentry = {}
if self.index:
splunkentry["index"] = self.index
if self.source:
splunkentry["source"] = self.source
if self.sourcetype:
splunkentry["sourcetype"] = self.sourcetype
if self.host:
splunkentry["host"] = self.host
else:
splunkentry["host"] = logentry["sensor"]
splunkentry["event"] = logentry
self.postentry(splunkentry)
def postentry(self, entry):
"""
Send a JSON log entry to Splunk with Twisted
"""
headers = http_headers.Headers(
{
b"User-Agent": [b"Cowrie SSH Honeypot"],
b"Authorization": [b"Splunk " + self.token.encode("utf8")],
b"Content-Type": [b"application/json"],
}
)
body = FileBodyProducer(BytesIO(json.dumps(entry).encode("utf8")))
d = self.agent.request(b"POST", self.url, headers, body)
def cbBody(body):
return processResult(body)
def cbPartial(failure):
"""
Google HTTP Server does not set Content-Length. Twisted marks it as partial
"""
failure.printTraceback()
return processResult(failure.value.response)
def cbResponse(response):
if response.code == 200:
return
else:
log.msg(f"SplunkHEC response: {response.code} {response.phrase}")
d = client.readBody(response)
d.addCallback(cbBody)
d.addErrback(cbPartial)
return d
def cbError(failure):
failure.printTraceback()
def processResult(result):
j = json.loads(result)
log.msg("SplunkHEC response: {}".format(j["text"]))
d.addCallback(cbResponse)
d.addErrback(cbError)
return d
class WebClientContextFactory(ClientContextFactory):
def getContext(self, hostname, port):
return ClientContextFactory.getContext(self)
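# Illustrative configuration; token and url are required by start() above and
# should point at your HTTP Event Collector, the remaining options are optional
# metadata. Values are placeholders.
#
#   [output_splunk]
#   url = https://splunk.example.com:8088/services/collector/event
#   token = <HEC token>
#   index = cowrie
#   sourcetype = cowrie
#   source = cowrie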
| 3,647 | 30.179487 | 88 | py |
cowrie | cowrie-master/src/cowrie/output/sqlite.py | from __future__ import annotations
import sqlite3
from typing import Any
from twisted.enterprise import adbapi
from twisted.internet import defer
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
sqlite output
"""
db: Any
def start(self):
"""
        Start the sqlite3 logging module using a Twisted ConnectionPool.
        It needs to be started with check_same_thread=False. See
https://twistedmatrix.com/trac/ticket/3629.
"""
sqliteFilename = CowrieConfig.get("output_sqlite", "db_file")
try:
self.db = adbapi.ConnectionPool(
"sqlite3", database=sqliteFilename, check_same_thread=False
)
except sqlite3.OperationalError as e:
log.msg(e)
self.db.start()
def stop(self):
"""
Close connection to db
"""
self.db.close()
def sqlerror(self, error):
log.err("sqlite error")
error.printTraceback()
def simpleQuery(self, sql, args):
"""
Just run a deferred sql query, only care about errors
"""
d = self.db.runQuery(sql, args)
d.addErrback(self.sqlerror)
@defer.inlineCallbacks
def write(self, entry):
if entry["eventid"] == "cowrie.session.connect":
r = yield self.db.runQuery(
"SELECT `id` FROM `sensors` " "WHERE `ip` = ?", (self.sensor,)
)
if r and r[0][0]:
sensorid = r[0][0]
else:
yield self.db.runQuery(
"INSERT INTO `sensors` (`ip`) " "VALUES (?)", (self.sensor,)
)
r = yield self.db.runQuery("SELECT LAST_INSERT_ROWID()")
sensorid = int(r[0][0])
self.simpleQuery(
"INSERT INTO `sessions` (`id`, `starttime`, `sensor`, `ip`) "
"VALUES (?, ?, ?, ?)",
(entry["session"], entry["timestamp"], sensorid, entry["src_ip"]),
)
elif entry["eventid"] == "cowrie.login.success":
self.simpleQuery(
"INSERT INTO `auth` (`session`, `success`, `username`, `password`, `timestamp`) "
"VALUES (?, ?, ?, ?, ?)",
(
entry["session"],
1,
entry["username"],
entry["password"],
entry["timestamp"],
),
)
elif entry["eventid"] == "cowrie.login.failed":
self.simpleQuery(
"INSERT INTO `auth` (`session`, `success`, `username`, `password`, `timestamp`) "
"VALUES (?, ?, ?, ?, ?)",
(
entry["session"],
0,
entry["username"],
entry["password"],
entry["timestamp"],
),
)
elif entry["eventid"] == "cowrie.command.input":
self.simpleQuery(
"INSERT INTO `input` (`session`, `timestamp`, `success`, `input`) "
"VALUES (?, ?, ?, ?)",
(entry["session"], entry["timestamp"], 1, entry["input"]),
)
elif entry["eventid"] == "cowrie.command.failed":
self.simpleQuery(
"INSERT INTO `input` (`session`, `timestamp`, `success`, `input`) "
"VALUES (?, ?, ?, ?)",
(entry["session"], entry["timestamp"], 0, entry["input"]),
)
elif entry["eventid"] == "cowrie.session.params":
self.simpleQuery(
"INSERT INTO `params` (`session`, `arch`) " "VALUES (?, ?)",
(entry["session"], entry["arch"]),
)
elif entry["eventid"] == "cowrie.session.file_download":
self.simpleQuery(
"INSERT INTO `downloads` (`session`, `timestamp`, `url`, `outfile`, `shasum`) "
"VALUES (?, ?, ?, ?, ?)",
(
entry["session"],
entry["timestamp"],
entry["url"],
entry["outfile"],
entry["shasum"],
),
)
elif entry["eventid"] == "cowrie.session.file_download.failed":
self.simpleQuery(
"INSERT INTO `downloads` (`session`, `timestamp`, `url`, `outfile`, `shasum`) "
"VALUES (?, ?, ?, ?, ?)",
(entry["session"], entry["timestamp"], entry["url"], "NULL", "NULL"),
)
elif entry["eventid"] == "cowrie.client.version":
r = yield self.db.runQuery(
"SELECT `id` FROM `clients` " "WHERE `version` = ?", (entry["version"],)
)
if r and r[0][0]:
id = int(r[0][0])
else:
yield self.db.runQuery(
"INSERT INTO `clients` (`version`) " "VALUES (?)",
(entry["version"],),
)
r = yield self.db.runQuery("SELECT LAST_INSERT_ROWID()")
id = int(r[0][0])
self.simpleQuery(
"UPDATE `sessions` " "SET `client` = ? " "WHERE `id` = ?",
(id, entry["session"]),
)
elif entry["eventid"] == "cowrie.client.size":
self.simpleQuery(
"UPDATE `sessions` " "SET `termsize` = ? " "WHERE `id` = ?",
("{}x{}".format(entry["width"], entry["height"]), entry["session"]),
)
elif entry["eventid"] == "cowrie.session.closed":
self.simpleQuery(
"UPDATE `sessions` " "SET `endtime` = ? " "WHERE `id` = ?",
(entry["timestamp"], entry["session"]),
)
elif entry["eventid"] == "cowrie.log.closed":
self.simpleQuery(
"INSERT INTO `ttylog` (`session`, `ttylog`, `size`) "
"VALUES (?, ?, ?)",
(entry["session"], entry["ttylog"], entry["size"]),
)
elif entry["eventid"] == "cowrie.client.fingerprint":
self.simpleQuery(
"INSERT INTO `keyfingerprints` (`session`, `username`, `fingerprint`) "
"VALUES (?, ?, ?)",
(entry["session"], entry["username"], entry["fingerprint"]),
)
elif entry["eventid"] == "cowrie.direct-tcpip.request":
self.simpleQuery(
"INSERT INTO `ipforwards` (`session`, `timestamp`, `dst_ip`, `dst_port`) "
"VALUES (?, ?, ?, ?)",
(
entry["session"],
entry["timestamp"],
entry["dst_ip"],
entry["dst_port"],
),
)
elif entry["eventid"] == "cowrie.direct-tcpip.data":
self.simpleQuery(
"INSERT INTO `ipforwardsdata` (`session`, `timestamp`, `dst_ip`, `dst_port`, `data`) "
"VALUES (?, ?, ?, ?, ?)",
(
entry["session"],
entry["timestamp"],
entry["dst_ip"],
entry["dst_port"],
entry["data"],
),
)
| 7,367 | 33.919431 | 102 | py |
cowrie | cowrie-master/src/cowrie/output/textlog.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import annotations
import cowrie.core.cef
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
textlog output
"""
def start(self):
self.format = CowrieConfig.get("output_textlog", "format")
self.outfile = open(
CowrieConfig.get("output_textlog", "logfile"), "a", encoding="utf-8"
)
def stop(self):
pass
def write(self, logentry):
if self.format == "cef":
self.outfile.write("{} ".format(logentry["timestamp"]))
self.outfile.write(f"{cowrie.core.cef.formatCef(logentry)}\n")
else:
self.outfile.write("{} ".format(logentry["timestamp"]))
self.outfile.write("{} ".format(logentry["session"]))
self.outfile.write("{}\n".format(logentry["message"]))
self.outfile.flush()
| 2,383 | 38.733333 | 80 | py |
cowrie | cowrie-master/src/cowrie/output/redis.py | from __future__ import annotations
import json
from configparser import NoOptionError
import redis
import cowrie.core.output
from cowrie.core.config import CowrieConfig
SEND_METHODS = {
"lpush": lambda redis_client, key, message: redis_client.lpush(key, message),
"rpush": lambda redis_client, key, message: redis_client.rpush(key, message),
"publish": lambda redis_client, key, message: redis_client.publish(key, message),
}
class Output(cowrie.core.output.Output):
"""
redis output
"""
def start(self):
"""
Initialize pymisp module and ObjectWrapper (Abstract event and object creation)
"""
host: str = CowrieConfig.get("output_redis", "host")
port: int = CowrieConfig.getint("output_redis", "port")
try:
db = CowrieConfig.getint("output_redis", "db")
except NoOptionError:
db = 0
try:
password = CowrieConfig.get("output_redis", "password")
except NoOptionError:
password = None
self.redis = redis.StrictRedis(host=host, port=port, db=db, password=password)
self.keyname = CowrieConfig.get("output_redis", "keyname")
try:
self.send_method = SEND_METHODS[
CowrieConfig.get("output_redis", "send_method")
]
except (NoOptionError, KeyError):
self.send_method = SEND_METHODS["lpush"]
def stop(self):
pass
def write(self, logentry):
"""
Push to redis
"""
# Add the entry to redis
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
self.send_method(self.redis, self.keyname, json.dumps(logentry))
| 1,797 | 27.539683 | 87 | py |
cowrie | cowrie-master/src/cowrie/output/misp.py | from __future__ import annotations
import warnings
from functools import wraps
from pathlib import Path
from pymisp import MISPAttribute, MISPEvent, MISPSighting
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
try:
from pymisp import ExpandedPyMISP as PyMISP
except ImportError:
from pymisp import PyMISP as PyMISP
# PyMISP is very verbose regarding Python 2 deprecation
def ignore_warnings(f):
@wraps(f)
def inner(*args, **kwargs):
with warnings.catch_warnings(record=True):
warnings.simplefilter("ignore")
response = f(*args, **kwargs)
return response
return inner
class Output(cowrie.core.output.Output):
"""
MISP Upload Plugin for Cowrie.
This Plugin creates a new event for unseen file uploads
or adds sightings for previously seen files.
The decision is done by searching for the SHA 256 sum in all matching attributes.
"""
debug: bool
@ignore_warnings
def start(self):
"""
Start output plugin
"""
misp_url = CowrieConfig.get("output_misp", "base_url")
misp_key = CowrieConfig.get("output_misp", "api_key")
misp_verifycert = (
"true" == CowrieConfig.get("output_misp", "verify_cert").lower()
)
self.misp_api = PyMISP(
url=misp_url, key=misp_key, ssl=misp_verifycert, debug=False
)
self.debug = CowrieConfig.getboolean("output_misp", "debug", fallback=False)
self.publish = CowrieConfig.getboolean(
"output_misp", "publish_event", fallback=False
)
def stop(self):
"""
Stop output plugin
"""
pass
def write(self, entry):
"""
Push file download to MISP
"""
if entry["eventid"] == "cowrie.session.file_download":
file_sha_attrib = self.find_attribute("sha256", entry["shasum"])
if file_sha_attrib:
# file is known, add sighting!
if self.debug:
log.msg("File known, add sighting")
self.add_sighting(entry, file_sha_attrib)
else:
# file is unknown, new event with upload
if self.debug:
log.msg("File unknwon, add new event")
self.create_new_event(entry)
@ignore_warnings
def find_attribute(self, attribute_type, searchterm):
"""
Returns a matching attribute or None if nothing was found.
"""
result = self.misp_api.search(
controller="attributes", type_attribute=attribute_type, value=searchterm
)
if result["Attribute"]:
return result["Attribute"][0]
else:
return None
@ignore_warnings
def create_new_event(self, entry):
attribute = MISPAttribute()
attribute.type = "malware-sample"
attribute.value = entry["shasum"]
attribute.data = Path(entry["outfile"])
attribute.comment = "File uploaded to Cowrie ({})".format(entry["sensor"])
attribute.expand = "binary"
if "url" in entry:
attributeURL = MISPAttribute()
attributeURL.type = "url"
attributeURL.value = entry["url"]
attributeURL.to_ids = True
else:
attributeURL = MISPAttribute()
attributeURL.type = "text"
attributeURL.value = "External upload"
attributeIP = MISPAttribute()
attributeIP.type = "ip-src"
attributeIP.value = entry["src_ip"]
attributeDT = MISPAttribute()
attributeDT.type = "datetime"
attributeDT.value = entry["timestamp"]
event = MISPEvent()
event.info = "File uploaded to Cowrie ({})".format(entry["sensor"])
event.add_tag("tlp:white")
event.attributes = [attribute, attributeURL, attributeIP, attributeDT]
event.run_expansions()
if self.publish:
event.publish()
result = self.misp_api.add_event(event)
if self.debug:
log.msg(f"Event creation result: \n{result}")
@ignore_warnings
def add_sighting(self, entry, attribute):
sighting = MISPSighting()
sighting.source = "{} (Cowrie)".format(entry["sensor"])
self.misp_api.add_sighting(sighting, attribute)
| 4,393 | 31.308824 | 85 | py |
cowrie | cowrie-master/src/cowrie/output/slack.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import annotations
import json
import time
from slack import WebClient
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
slack output
"""
def start(self):
self.slack_channel = CowrieConfig.get("output_slack", "channel")
self.slack_token = CowrieConfig.get("output_slack", "token")
def stop(self):
pass
def write(self, logentry):
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
self.sc = WebClient(self.slack_token)
self.sc.chat_postMessage(
channel=self.slack_channel,
text="{} {}".format(
time.strftime("%Y-%m-%d %H:%M:%S"),
json.dumps(logentry, indent=4, sort_keys=True),
),
)
| 2,394 | 35.287879 | 75 | py |
cowrie | cowrie-master/src/cowrie/output/cuckoo.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS`` AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
"""
Send downloaded/uploaded files to Cuckoo
"""
from __future__ import annotations
import os
from urllib.parse import urljoin, urlparse
import requests
from requests.auth import HTTPBasicAuth
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
cuckoo output
"""
api_user: str
api_passwd: str
url_base: bytes
cuckoo_force: int
def start(self):
"""
Start output plugin
"""
self.url_base = CowrieConfig.get("output_cuckoo", "url_base").encode("utf-8")
self.api_user = CowrieConfig.get("output_cuckoo", "user")
self.api_passwd = CowrieConfig.get("output_cuckoo", "passwd", raw=True)
self.cuckoo_force = int(CowrieConfig.getboolean("output_cuckoo", "force"))
def stop(self):
"""
Stop output plugin
"""
pass
def write(self, entry):
if entry["eventid"] == "cowrie.session.file_download":
log.msg("Sending file to Cuckoo")
p = urlparse(entry["url"]).path
if p == "":
fileName = entry["shasum"]
else:
b = os.path.basename(p)
if b == "":
fileName = entry["shasum"]
else:
fileName = b
if (
self.cuckoo_force
or self.cuckoo_check_if_dup(os.path.basename(entry["outfile"])) is False
):
self.postfile(entry["outfile"], fileName)
elif entry["eventid"] == "cowrie.session.file_upload":
if (
self.cuckoo_force
or self.cuckoo_check_if_dup(os.path.basename(entry["outfile"])) is False
):
log.msg("Sending file to Cuckoo")
self.postfile(entry["outfile"], entry["filename"])
def cuckoo_check_if_dup(self, sha256: str) -> bool:
"""
Check if file already was analyzed by cuckoo
"""
try:
log.msg(f"Looking for tasks for: {sha256}")
res = requests.get(
urljoin(self.url_base, f"/files/view/sha256/{sha256}".encode()),
verify=False,
auth=HTTPBasicAuth(self.api_user, self.api_passwd),
timeout=60,
)
if res and res.ok:
log.msg(
"Sample found in Sandbox, with ID: {}".format(
res.json().get("sample", {}).get("id", 0)
)
)
return True
except Exception as e:
log.msg(e)
return False
def postfile(self, artifact, fileName):
"""
Send a file to Cuckoo
"""
with open(artifact, "rb") as art:
files = {"file": (fileName, art.read())}
try:
res = requests.post(
urljoin(self.url_base, b"tasks/create/file"),
files=files,
auth=HTTPBasicAuth(self.api_user, self.api_passwd),
verify=False,
)
if res and res.ok:
log.msg(
"Cuckoo Request: {}, Task created with ID: {}".format(
res.status_code, res.json()["task_id"]
)
)
else:
log.msg(f"Cuckoo Request failed: {res.status_code}")
except Exception as e:
log.msg(f"Cuckoo Request failed: {e}")
def posturl(self, scanUrl):
"""
Send a URL to Cuckoo
"""
data = {"url": scanUrl}
try:
res = requests.post(
urljoin(self.url_base, b"tasks/create/url"),
data=data,
auth=HTTPBasicAuth(self.api_user, self.api_passwd),
verify=False,
)
if res and res.ok:
log.msg(
"Cuckoo Request: {}, Task created with ID: {}".format(
res.status_code, res.json()["task_id"]
)
)
else:
log.msg(f"Cuckoo Request failed: {res.status_code}")
except Exception as e:
log.msg(f"Cuckoo Request failed: {e}")
| 5,799 | 33.52381 | 88 | py |
cowrie | cowrie-master/src/cowrie/output/malshare.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS`` AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
"""
Send files to https://malshare.com/
More info https://malshare.com/doc.php
"""
from __future__ import annotations
import os
from urllib.parse import urlparse
import requests
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
malshare output
TODO: use `treq`
"""
apiKey: str
def start(self):
"""
Start output plugin
"""
self.apiKey = CowrieConfig.get("output_malshare", "api_key")
def stop(self):
"""
Stop output plugin
"""
pass
def write(self, entry):
if entry["eventid"] == "cowrie.session.file_download":
p = urlparse(entry["url"]).path
if p == "":
fileName = entry["shasum"]
else:
b = os.path.basename(p)
if b == "":
fileName = entry["shasum"]
else:
fileName = b
self.postfile(entry["outfile"], fileName)
elif entry["eventid"] == "cowrie.session.file_upload":
self.postfile(entry["outfile"], entry["filename"])
def postfile(self, artifact, fileName):
"""
Send a file to MalShare
"""
        try:
            with open(artifact, "rb") as fh:
                res = requests.post(
                    "https://malshare.com/api.php?api_key="
                    + self.apiKey
                    + "&action=upload",
                    files={"upload": fh},
                )
if res and res.ok:
log.msg("Submitted to MalShare")
else:
log.msg(f"MalShare Request failed: {res.status_code}")
except Exception as e:
log.msg(f"MalShare Request failed: {e}")
| 3,275 | 31.117647 | 75 | py |
cowrie | cowrie-master/src/cowrie/output/s3.py | """
Send downloaded/uploaded files to S3 (or compatible)
"""
from __future__ import annotations
from typing import Any
from configparser import NoOptionError
from botocore.exceptions import ClientError
from botocore.session import get_session
from twisted.internet import defer, threads
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
s3 output
"""
def start(self) -> None:
self.bucket = CowrieConfig.get("output_s3", "bucket")
self.seen: set[str] = set()
self.session = get_session()
try:
if CowrieConfig.get("output_s3", "access_key_id") and CowrieConfig.get(
"output_s3", "secret_access_key"
):
self.session.set_credentials(
CowrieConfig.get("output_s3", "access_key_id"),
CowrieConfig.get("output_s3", "secret_access_key"),
)
except NoOptionError:
log.msg(
"No AWS credentials found in config - using botocore global settings."
)
self.client = self.session.create_client(
"s3",
region_name=CowrieConfig.get("output_s3", "region"),
endpoint_url=CowrieConfig.get("output_s3", "endpoint", fallback=None),
verify=CowrieConfig.getboolean("output_s3", "verify", fallback=True),
)
def stop(self) -> None:
pass
def write(self, entry: dict[str, Any]) -> None:
if entry["eventid"] == "cowrie.session.file_download":
self.upload(entry["shasum"], entry["outfile"])
elif entry["eventid"] == "cowrie.session.file_upload":
self.upload(entry["shasum"], entry["outfile"])
@defer.inlineCallbacks
def _object_exists_remote(self, shasum):
try:
yield threads.deferToThread(
self.client.head_object,
Bucket=self.bucket,
Key=shasum,
)
except ClientError as e:
if e.response["Error"]["Code"] == "404":
defer.returnValue(False)
raise
defer.returnValue(True)
@defer.inlineCallbacks
def upload(self, shasum, filename):
if shasum in self.seen:
log.msg(f"Already uploaded file with sha {shasum} to S3")
return
exists = yield self._object_exists_remote(shasum)
if exists:
log.msg(f"Somebody else already uploaded file with sha {shasum} to S3")
self.seen.add(shasum)
return
log.msg(f"Uploading file with sha {shasum} ({filename}) to S3")
with open(filename, "rb") as fp:
yield threads.deferToThread(
self.client.put_object,
Bucket=self.bucket,
Key=shasum,
Body=fp.read(),
ContentType="application/octet-stream",
)
self.seen.add(shasum)
| 3,019 | 29.505051 | 86 | py |
cowrie | cowrie-master/src/cowrie/output/socketlog.py | from __future__ import annotations
import json
import socket
import cowrie.core.output
from cowrie.core.config import CowrieConfig
class Output(cowrie.core.output.Output):
"""
socketlog output
"""
def start(self):
self.timeout = CowrieConfig.getint("output_socketlog", "timeout")
addr = CowrieConfig.get("output_socketlog", "address")
self.host = addr.split(":")[0]
self.port = int(addr.split(":")[1])
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(self.timeout)
self.sock.connect((self.host, self.port))
def stop(self):
self.sock.close()
def write(self, logentry):
for i in list(logentry.keys()):
# Remove twisted 15 legacy keys
if i.startswith("log_"):
del logentry[i]
message = json.dumps(logentry) + "\n"
try:
self.sock.sendall(message.encode())
except OSError as ex:
if ex.errno == 32: # Broken pipe
self.start()
self.sock.sendall(message.encode())
else:
raise
| 1,156 | 25.906977 | 73 | py |
cowrie | cowrie-master/src/cowrie/output/greynoise.py | """
Send attackers IP to GreyNoise
"""
from __future__ import annotations
import treq
from twisted.internet import defer, error
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
COWRIE_USER_AGENT = "Cowrie Honeypot"
GNAPI_URL = "https://api.greynoise.io/v3/community/"
class Output(cowrie.core.output.Output):
"""
greynoise output
"""
def start(self):
"""
Start output plugin
"""
self.apiKey = CowrieConfig.get("output_greynoise", "api_key", fallback=None)
self.debug = CowrieConfig.getboolean(
"output_greynoise", "debug", fallback=False
)
def stop(self):
"""
Stop output plugin
"""
pass
def write(self, entry):
if entry["eventid"] == "cowrie.session.connect":
self.scanip(entry)
@defer.inlineCallbacks
def scanip(self, entry):
"""
Scan IP against GreyNoise API
"""
def message(query):
if query["noise"]:
log.msg(
eventid="cowrie.greynoise.result",
session=entry["session"],
format=f"GreyNoise: {query['ip']} has been observed scanning the Internet. GreyNoise "
f"classification is {query['classification']} and the believed owner is {query['name']}",
)
if query["riot"]:
log.msg(
eventid="cowrie.greynoise.result",
session=entry["session"],
format=f"GreyNoise: {query['ip']} belongs to a benign service or provider. "
f"The owner is {query['name']}.",
)
gn_url = f"{GNAPI_URL}{entry['src_ip']}".encode()
headers = {"User-Agent": [COWRIE_USER_AGENT], "key": self.apiKey}
try:
response = yield treq.get(url=gn_url, headers=headers, timeout=10)
except (
defer.CancelledError,
error.ConnectingCancelledError,
error.DNSLookupError,
):
log.msg("GreyNoise requests timeout")
return
if response.code == 404:
rsp = yield response.json()
log.err(f"GreyNoise: {rsp['ip']} - {rsp['message']}")
return
if response.code != 200:
rsp = yield response.text()
log.err(f"GreyNoise: got error {rsp}")
return
j = yield response.json()
if self.debug:
log.msg("GreyNoise: debug: " + repr(j))
if j["message"] == "Success":
message(j)
else:
log.msg("GreyNoise: no results for for IP {}".format(entry["src_ip"]))
| 2,749 | 27.947368 | 109 | py |
cowrie | cowrie-master/src/cowrie/core/checkers.py | # Copyright (c) 2009-2014 Upi Tamminen <desaster@gmail.com>
# See the COPYRIGHT file for more information
"""
This module contains credential checkers used by the honeypot
"""
from __future__ import annotations
from sys import modules
from zope.interface import implementer
from twisted.conch import error
from twisted.conch.ssh import keys
from twisted.cred.checkers import ICredentialsChecker
from twisted.cred.credentials import ISSHPrivateKey
from twisted.cred.error import UnauthorizedLogin, UnhandledCredentials
from twisted.internet import defer
from twisted.python import failure, log
from cowrie.core import auth
from cowrie.core import credentials as conchcredentials
from cowrie.core.config import CowrieConfig
@implementer(ICredentialsChecker)
class HoneypotPublicKeyChecker:
"""
Checker that accepts, logs and denies public key authentication attempts
"""
credentialInterfaces = (ISSHPrivateKey,)
def requestAvatarId(self, credentials):
_pubKey = keys.Key.fromString(credentials.blob)
log.msg(
eventid="cowrie.client.fingerprint",
format="public key attempt for user %(username)s of type %(type)s with fingerprint %(fingerprint)s",
username=credentials.username,
fingerprint=_pubKey.fingerprint(),
key=_pubKey.toString("OPENSSH"),
type=_pubKey.sshType(),
)
return failure.Failure(error.ConchError("Incorrect signature"))
@implementer(ICredentialsChecker)
class HoneypotNoneChecker:
"""
Checker that does no authentication check
"""
credentialInterfaces = (conchcredentials.IUsername,)
def requestAvatarId(self, credentials):
return defer.succeed(credentials.username)
@implementer(ICredentialsChecker)
class HoneypotPasswordChecker:
"""
Checker that accepts "keyboard-interactive" and "password"
"""
credentialInterfaces = (
conchcredentials.IUsernamePasswordIP,
conchcredentials.IPluggableAuthenticationModulesIP,
)
def requestAvatarId(self, credentials):
if hasattr(credentials, "password"):
if self.checkUserPass(
credentials.username, credentials.password, credentials.ip
):
return defer.succeed(credentials.username)
return defer.fail(UnauthorizedLogin())
if hasattr(credentials, "pamConversion"):
return self.checkPamUser(
credentials.username, credentials.pamConversion, credentials.ip
)
return defer.fail(UnhandledCredentials())
def checkPamUser(self, username, pamConversion, ip):
r = pamConversion((("Password:", 1),))
return r.addCallback(self.cbCheckPamUser, username, ip)
def cbCheckPamUser(self, responses, username, ip):
for (response, _) in responses:
if self.checkUserPass(username, response, ip):
return defer.succeed(username)
return defer.fail(UnauthorizedLogin())
def checkUserPass(self, theusername: bytes, thepassword: bytes, ip: str) -> bool:
# UserDB is the default auth_class
authname = auth.UserDB
# Is the auth_class defined in the config file?
if CowrieConfig.has_option("honeypot", "auth_class"):
authclass = CowrieConfig.get("honeypot", "auth_class")
authmodule = "cowrie.core.auth"
            # Check if authclass exists in cowrie.core.auth
if hasattr(modules[authmodule], authclass):
authname = getattr(modules[authmodule], authclass)
else:
log.msg(f"auth_class: {authclass} not found in {authmodule}")
theauth = authname()
if theauth.checklogin(theusername, thepassword, ip):
log.msg(
eventid="cowrie.login.success",
format="login attempt [%(username)s/%(password)s] succeeded",
username=theusername,
password=thepassword,
)
return True
log.msg(
eventid="cowrie.login.failed",
format="login attempt [%(username)s/%(password)s] failed",
username=theusername,
password=thepassword,
)
return False
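# A minimal sketch of how the checker is selected from cowrie.cfg (illustrative
# values; both classes live in cowrie.core.auth and an unknown name falls back
# to UserDB):
#
#   [honeypot]
#   auth_class = AuthRandom
#   auth_class_parameters = 2, 5, 10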
| 4,224 | 32.007813 | 112 | py |
cowrie | cowrie-master/src/cowrie/core/auth.py | # Copyright (c) 2009-2014 Upi Tamminen <desaster@gmail.com>
# See the COPYRIGHT file for more information
"""
This module contains authentication code
"""
from __future__ import annotations
import json
import re
from collections import OrderedDict
from os import path
from random import randint
from typing import Any, Union
from re import Pattern
from twisted.python import log
from cowrie.core.config import CowrieConfig
_USERDB_DEFAULTS: list[str] = [
"root:x:!root",
"root:x:!123456",
"root:x:!/honeypot/i",
"root:x:*",
"phil:x:*",
"phil:x:fout",
]
class UserDB:
"""
By Walter de Jong <walter@sara.nl>
"""
def __init__(self) -> None:
self.userdb: dict[
tuple[Union[Pattern[bytes], bytes], Union[Pattern[bytes], bytes]], bool
] = OrderedDict()
self.load()
def load(self) -> None:
"""
load the user db
"""
dblines: list[str]
try:
with open(
"{}/userdb.txt".format(CowrieConfig.get("honeypot", "etc_path")),
encoding="ascii",
) as db:
dblines = db.readlines()
except OSError:
log.msg("Could not read etc/userdb.txt, default database activated")
dblines = _USERDB_DEFAULTS
for user in dblines:
if not user.startswith("#"):
try:
login = user.split(":")[0].encode("utf8")
password = user.split(":")[2].strip().encode("utf8")
except IndexError:
continue
else:
self.adduser(login, password)
def checklogin(
self, thelogin: bytes, thepasswd: bytes, src_ip: str = "0.0.0.0"
) -> bool:
for credentials, policy in self.userdb.items():
login: Union[bytes, Pattern[bytes]]
passwd: Union[bytes, Pattern[bytes]]
login, passwd = credentials
if self.match_rule(login, thelogin):
if self.match_rule(passwd, thepasswd):
return policy
return False
def match_rule(
self, rule: Union[bytes, Pattern[bytes]], data: bytes
) -> Union[bool, bytes]:
if isinstance(rule, bytes):
return rule in [b"*", data]
return bool(rule.search(data))
def re_or_bytes(self, rule: bytes) -> Union[Pattern[bytes], bytes]:
"""
Convert a /.../ type rule to a regex, otherwise return the string as-is
@param login: rule
@type login: bytes
"""
res = re.match(rb"/(.+)/(i)?$", rule)
if res:
return re.compile(res.group(1), re.IGNORECASE if res.group(2) else 0)
return rule
def adduser(self, login: bytes, passwd: bytes) -> None:
"""
All arguments are bytes
@param login: user id
@type login: bytes
@param passwd: password
@type passwd: bytes
"""
user = self.re_or_bytes(login)
if passwd[0] == ord("!"):
policy = False
passwd = passwd[1:]
else:
policy = True
p = self.re_or_bytes(passwd)
self.userdb[(user, p)] = policy
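    # Illustration of how the default rules above combine (assuming
    # _USERDB_DEFAULTS is in effect):
    #   checklogin(b"root", b"123456")    -> False  (explicit "!" deny entry)
    #   checklogin(b"root", b"Honeypot1") -> False  (matches the /honeypot/i deny regex)
    #   checklogin(b"root", b"hunter2")   -> True   (falls through to "root:x:*")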
class AuthRandom:
"""
Alternative class that defines the checklogin() method.
Users will be authenticated after a random number of attempts.
"""
def __init__(self) -> None:
# Default values
self.mintry: int = 2
self.maxtry: int = 5
self.maxcache: int = 10
# Are there auth_class parameters?
if CowrieConfig.has_option("honeypot", "auth_class_parameters"):
parameters: str = CowrieConfig.get("honeypot", "auth_class_parameters")
parlist: list[str] = parameters.split(",")
if len(parlist) == 3:
self.mintry = int(parlist[0])
self.maxtry = int(parlist[1])
self.maxcache = int(parlist[2])
if self.maxtry < self.mintry:
self.maxtry = self.mintry + 1
log.msg(f"maxtry < mintry, adjusting maxtry to: {self.maxtry}")
self.uservar: dict[Any, Any] = {}
self.uservar_file: str = "{}/auth_random.json".format(
CowrieConfig.get("honeypot", "state_path")
)
self.loadvars()
def loadvars(self) -> None:
"""
Load user vars from json file
"""
if path.isfile(self.uservar_file):
with open(self.uservar_file, encoding="utf-8") as fp:
try:
self.uservar = json.load(fp)
except Exception:
self.uservar = {}
def savevars(self) -> None:
"""
Save the user vars to json file
"""
data = self.uservar
# Note: this is subject to races between cowrie logins
with open(self.uservar_file, "w", encoding="utf-8") as fp:
json.dump(data, fp)
def checklogin(self, thelogin: bytes, thepasswd: bytes, src_ip: str) -> bool:
"""
Every new source IP will have to try a random number of times between
'mintry' and 'maxtry' before succeeding to login.
All username/password combinations must be different.
The successful login combination is stored with the IP address.
Successful username/passwords pairs are also cached for 'maxcache' times.
This is to allow access for returns from different IP addresses.
Variables are saved in 'uservar.json' in the data directory.
"""
auth: bool = False
userpass: str = str(thelogin) + ":" + str(thepasswd)
if "cache" not in self.uservar:
self.uservar["cache"] = []
cache = self.uservar["cache"]
# Check if it is the first visit from src_ip
if src_ip not in self.uservar:
self.uservar[src_ip] = {}
ipinfo = self.uservar[src_ip]
ipinfo["try"] = 0
if userpass in cache:
log.msg(f"first time for {src_ip}, found cached: {userpass}")
ipinfo["max"] = 1
ipinfo["user"] = str(thelogin)
ipinfo["pw"] = str(thepasswd)
auth = True
self.savevars()
return auth
ipinfo["max"] = randint(self.mintry, self.maxtry)
log.msg("first time for {}, need: {}".format(src_ip, ipinfo["max"]))
else:
if userpass in cache:
ipinfo = self.uservar[src_ip]
log.msg(f"Found cached: {userpass}")
ipinfo["max"] = 1
ipinfo["user"] = str(thelogin)
ipinfo["pw"] = str(thepasswd)
auth = True
self.savevars()
return auth
ipinfo = self.uservar[src_ip]
# Fill in missing variables
if "max" not in ipinfo:
ipinfo["max"] = randint(self.mintry, self.maxtry)
if "try" not in ipinfo:
ipinfo["try"] = 0
if "tried" not in ipinfo:
ipinfo["tried"] = []
# Don't count repeated username/password combinations
if userpass in ipinfo["tried"]:
log.msg("already tried this combination")
self.savevars()
return auth
ipinfo["try"] += 1
attempts: int = ipinfo["try"]
need: int = ipinfo["max"]
log.msg(f"login attempt: {attempts}")
# Check if enough login attempts are tried
if attempts < need:
self.uservar[src_ip]["tried"].append(userpass)
elif attempts == need:
ipinfo["user"] = str(thelogin)
ipinfo["pw"] = str(thepasswd)
cache.append(userpass)
if len(cache) > self.maxcache:
cache.pop(0)
auth = True
# Returning after successful login
elif attempts > need:
if "user" not in ipinfo or "pw" not in ipinfo:
log.msg("return, but username or password not set!!!")
ipinfo["tried"].append(userpass)
ipinfo["try"] = 1
else:
log.msg(
"login return, expect: [{}/{}]".format(ipinfo["user"], ipinfo["pw"])
)
if thelogin == ipinfo["user"] and str(thepasswd) == ipinfo["pw"]:
auth = True
self.savevars()
return auth
| 8,448 | 31.003788 | 88 | py |
cowrie | cowrie-master/src/cowrie/core/credentials.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import annotations
from collections.abc import Callable
from zope.interface import implementer
from twisted.cred.credentials import ICredentials, IUsernamePassword
class IUsername(ICredentials):
"""
Encapsulate username only
@type username: C{str}
@ivar username: The username associated with these credentials.
"""
class IUsernamePasswordIP(IUsernamePassword):
"""
I encapsulate a username, a plaintext password and a source IP
@type username: C{str}
@ivar username: The username associated with these credentials.
@type password: C{str}
@ivar password: The password associated with these credentials.
@type ip: C{str}
@ivar ip: The source ip address associated with these credentials.
"""
class IPluggableAuthenticationModulesIP(ICredentials):
"""
Twisted removed IPAM in 15, adding in Cowrie now
"""
@implementer(IPluggableAuthenticationModulesIP)
class PluggableAuthenticationModulesIP:
"""
Twisted removed IPAM in 15, adding in Cowrie now
"""
def __init__(self, username: str, pamConversion: Callable, ip: str) -> None:
self.username: str = username
self.pamConversion: Callable = pamConversion
self.ip: str = ip
@implementer(IUsername)
class Username:
def __init__(self, username: str):
self.username: str = username
@implementer(IUsernamePasswordIP)
class UsernamePasswordIP:
"""
This credential interface also provides an IP address
"""
def __init__(self, username: str, password: str, ip: str) -> None:
self.username: str = username
self.password: str = password
self.ip: str = ip
def checkPassword(self, password: str) -> bool:
return self.password == password
| 3,261 | 31.949495 | 80 | py |
cowrie | cowrie-master/src/cowrie/core/utils.py | # -*- test-case-name: cowrie.test.utils -*-
# Copyright (c) 2010-2014 Upi Tamminen <desaster@gmail.com>
# See the COPYRIGHT file for more information
from __future__ import annotations
import configparser
from typing import BinaryIO
from twisted.application import internet
from twisted.internet import endpoints
def durationHuman(duration: float) -> str:
"""
Turn number of seconds into human readable string
"""
seconds: int = int(round(duration))
minutes: int
minutes, seconds = divmod(seconds, 60)
hours: int
hours, minutes = divmod(minutes, 60)
days: float
days, hours = divmod(hours, 24)
years: float
years, days = divmod(days, 365.242199)
syears: str = str(years)
sseconds: str = str(seconds).rjust(2, "0")
sminutes: str = str(minutes).rjust(2, "0")
shours: str = str(hours).rjust(2, "0")
sduration: list[str] = []
if years > 0:
sduration.append("{} year{} ".format(syears, "s" * (years != 1)))
else:
if days > 0:
sduration.append("{} day{} ".format(days, "s" * (days != 1)))
if hours > 0:
sduration.append(f"{shours}:")
if minutes >= 0:
sduration.append(f"{sminutes}:")
if seconds >= 0:
sduration.append(f"{sseconds}")
return "".join(sduration)
def tail(the_file: BinaryIO, lines_2find: int = 20) -> list[bytes]:
"""
From http://stackoverflow.com/questions/136168/get-last-n-lines-of-a-file-with-python-similar-to-tail
"""
lines_found: int = 0
total_bytes_scanned: int = 0
the_file.seek(0, 2)
bytes_in_file: int = the_file.tell()
while lines_2find + 1 > lines_found and bytes_in_file > total_bytes_scanned:
byte_block: int = min(1024, bytes_in_file - total_bytes_scanned)
the_file.seek(-(byte_block + total_bytes_scanned), 2)
total_bytes_scanned += byte_block
lines_found += the_file.read(1024).count(b"\n")
the_file.seek(-total_bytes_scanned, 2)
line_list: list[bytes] = list(the_file.readlines())
return line_list[-lines_2find:]
# We read at least 21 line breaks from the bottom, block by block for speed
# 21 to ensure we don't get a half line
def uptime(total_seconds: float) -> str:
"""
Gives a human-readable uptime string
Thanks to http://thesmithfam.org/blog/2005/11/19/python-uptime-script/
(modified to look like the real uptime command)
"""
total_seconds = float(total_seconds)
# Helper vars:
MINUTE: int = 60
HOUR: int = MINUTE * 60
DAY: int = HOUR * 24
# Get the days, hours, etc:
days: int = int(total_seconds / DAY)
hours: int = int((total_seconds % DAY) / HOUR)
minutes: int = int((total_seconds % HOUR) / MINUTE)
# 14 days, 3:53
# 11 min
s: str = ""
if days > 0:
s += str(days) + " " + (days == 1 and "day" or "days") + ", "
if len(s) > 0 or hours > 0:
s += "{}:{}".format(str(hours).rjust(2), str(minutes).rjust(2, "0"))
else:
s += f"{minutes!s} min"
return s
def get_endpoints_from_section(
cfg: configparser.ConfigParser, section: str, default_port: int
) -> list[str]:
listen_addr: str
listen_port: int
listen_endpoints: list[str] = []
if cfg.has_option(section, "listen_endpoints"):
return cfg.get(section, "listen_endpoints").split()
if cfg.has_option(section, "listen_addr"):
listen_addr = cfg.get(section, "listen_addr")
else:
listen_addr = "0.0.0.0"
if cfg.has_option(section, "listen_port"):
listen_port = cfg.getint(section, "listen_port")
else:
listen_port = default_port
for i in listen_addr.split():
listen_endpoints.append(f"tcp:{listen_port}:interface={i}")
return listen_endpoints
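# Example (illustrative): a section with only "listen_addr = 0.0.0.0" yields
# ["tcp:<default_port>:interface=0.0.0.0"], while an explicit
# "listen_endpoints = tcp:2222:interface=127.0.0.1" is returned verbatim
# (split on whitespace).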
def create_endpoint_services(reactor, parent, listen_endpoints, factory):
for listen_endpoint in listen_endpoints:
endpoint = endpoints.serverFromString(reactor, listen_endpoint)
service = internet.StreamServerEndpointService(endpoint, factory)
# FIXME: Use addService on parent ?
service.setServiceParent(parent)
| 4,153 | 30 | 105 | py |
cowrie | cowrie-master/src/cowrie/core/realm.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import annotations
from zope.interface import implementer
from twisted.conch.interfaces import IConchUser
from twisted.conch.telnet import ITelnetProtocol
from twisted.cred.portal import IRealm
from cowrie.shell import avatar as shellavatar
from cowrie.shell import server as shellserver
from cowrie.telnet import session
@implementer(IRealm)
class HoneyPotRealm:
def __init__(self) -> None:
pass
def requestAvatar(self, avatarId, _mind, *interfaces):
user: IConchUser
if IConchUser in interfaces:
serv = shellserver.CowrieServer(self)
user = shellavatar.CowrieUser(avatarId, serv)
return interfaces[0], user, user.logout
if ITelnetProtocol in interfaces:
serv = shellserver.CowrieServer(self)
user = session.HoneyPotTelnetSession(avatarId, serv)
return interfaces[0], user, user.logout
raise NotImplementedError
| 2,435 | 41 | 75 | py |
cowrie | cowrie-master/src/cowrie/core/config.py | # Copyright (c) 2009-2014 Upi Tamminen <desaster@gmail.com>
# See the COPYRIGHT file for more information
"""
This module contains code to deal with Cowrie's configuration
"""
from __future__ import annotations
import configparser
from os import environ
from os.path import abspath, dirname, exists, join
from typing import Union
def to_environ_key(key: str) -> str:
return key.upper()
class EnvironmentConfigParser(configparser.ConfigParser):
"""
ConfigParser with additional option to read from environment variables
# TODO: def sections()
"""
def has_option(self, section: str, option: str) -> bool:
if to_environ_key("_".join(("cowrie", section, option))) in environ:
return True
return super().has_option(section, option)
def get(self, section: str, option: str, *, raw: bool = False, **kwargs) -> str: # type: ignore
key: str = to_environ_key("_".join(("cowrie", section, option)))
if key in environ:
return environ[key]
return super().get(section, option, raw=raw, **kwargs)
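    # Example (illustrative): setting the environment variable
    # COWRIE_HONEYPOT_AUTH_CLASS overrides the "auth_class" option of the
    # [honeypot] section, since lookups use "cowrie_<section>_<option>"
    # upper-cased.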
def readConfigFile(cfgfile: Union[list[str], str]) -> configparser.ConfigParser:
"""
Read config files and return ConfigParser object
@param cfgfile: filename or list of filenames
@return: ConfigParser object
"""
parser = EnvironmentConfigParser(interpolation=configparser.ExtendedInterpolation())
parser.read(cfgfile)
return parser
def get_config_path() -> list[str]:
"""
Get absolute path to the config file
"""
current_path = abspath(dirname(__file__))
root = "/".join(current_path.split("/")[:-3])
config_files = [
join(root, "etc/cowrie.cfg.dist"),
"/etc/cowrie/cowrie.cfg",
join(root, "etc/cowrie.cfg"),
join(root, "cowrie.cfg"),
]
found_confs = [path for path in config_files if exists(path)]
if found_confs:
return found_confs
print("Config file not found") # noqa: T201
return []
CowrieConfig = readConfigFile(get_config_path())
| 2,050 | 26.716216 | 100 | py |
cowrie | cowrie-master/src/cowrie/core/artifact.py | # Copyright (c) 2016 Michel Oosterhof <michel@oosterhof.net>
"""
This module contains code to handling saving of honeypot artifacts
These will typically be files uploaded to the honeypot and files
downloaded inside the honeypot, or input being piped in.
Code behaves like a normal Python file handle.
Example:
with Artifact(name) as f:
f.write("abc")
or:
g = Artifact("testme2")
g.write("def")
g.close()
"""
from __future__ import annotations
import hashlib
import os
import tempfile
from types import TracebackType
from typing import Any, Optional
from twisted.python import log
from cowrie.core.config import CowrieConfig
class Artifact:
artifactDir: str = CowrieConfig.get("honeypot", "download_path")
def __init__(self, label: str) -> None:
self.label: str = label
self.fp = tempfile.NamedTemporaryFile(dir=self.artifactDir, delete=False) # pylint: disable=R1732
self.tempFilename = self.fp.name
self.closed: bool = False
self.shasum: str = ""
self.shasumFilename: str = ""
def __enter__(self) -> Any:
return self.fp
def __exit__(
self,
etype: Optional[type[BaseException]],
einst: Optional[BaseException],
etrace: Optional[TracebackType],
) -> bool:
self.close()
return True
def write(self, data: bytes) -> None:
self.fp.write(data)
def fileno(self) -> Any:
return self.fp.fileno()
def close(self, keepEmpty: bool = False) -> Optional[tuple[str, str]]:
size: int = self.fp.tell()
if size == 0 and not keepEmpty:
os.remove(self.fp.name)
return None
self.fp.seek(0)
data = self.fp.read()
self.fp.close()
self.closed = True
self.shasum = hashlib.sha256(data).hexdigest()
self.shasumFilename = os.path.join(self.artifactDir, self.shasum)
if os.path.exists(self.shasumFilename):
log.msg("Not storing duplicate content " + self.shasum)
os.remove(self.fp.name)
else:
os.rename(self.fp.name, self.shasumFilename)
umask = os.umask(0)
os.umask(umask)
os.chmod(self.shasumFilename, 0o666 & ~umask)
return self.shasum, self.shasumFilename
| 2,319 | 24.217391 | 106 | py |
cowrie | cowrie-master/src/cowrie/core/cef.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
# cowrie.client.fingerprint
# cowrie.client.size
# cowrie.client.var
# cowrie.client.version
# cowrie.command.failed
# cowrie.command.success
# cowrie.direct-tcpip.data
# cowrie.direct-tcpip.request
# cowrie.log.closed
# cowrie.login.failed
# cowrie.login.success
# cowrie.session.closed
# cowrie.session.connect
# cowrie.session.file_download
# cowrie.session.file_upload
from __future__ import annotations
def formatCef(logentry: dict[str, str]) -> str:
"""
Take logentry and turn into CEF string
"""
# Jan 18 11:07:53 host CEF:Version|Device Vendor|Device Product|
# Device Version|Signature ID|Name|Severity|[Extension]
cefVendor = "Cowrie"
cefProduct = "Cowrie"
cefVersion = "1.0"
cefSignature = logentry["eventid"]
cefName = logentry["eventid"]
cefSeverity = "5"
cefExtensions = {
"app": "SSHv2",
"destinationServicename": "sshd",
"deviceExternalId": logentry["sensor"],
"msg": logentry["message"],
"src": logentry["src_ip"],
"proto": "tcp",
}
if logentry["eventid"] == "cowrie.session.connect":
cefExtensions["spt"] = logentry["src_port"]
cefExtensions["dpt"] = logentry["dst_port"]
cefExtensions["src"] = logentry["src_ip"]
cefExtensions["dst"] = logentry["dst_ip"]
elif logentry["eventid"] == "cowrie.login.success":
cefExtensions["duser"] = logentry["username"]
cefExtensions["outcome"] = "success"
elif logentry["eventid"] == "cowrie.login.failed":
cefExtensions["duser"] = logentry["username"]
cefExtensions["outcome"] = "failed"
elif logentry["eventid"] == "cowrie.file.file_download":
cefExtensions["filehash"] = logentry["filehash"]
cefExtensions["filePath"] = logentry["filename"]
cefExtensions["fsize"] = logentry["size"]
elif logentry["eventid"] == "cowrie.file.file_upload":
cefExtensions["filehash"] = logentry["filehash"]
cefExtensions["filePath"] = logentry["filename"]
cefExtensions["fsize"] = logentry["size"]
# 'out' 'outcome' request, rt
cefList = []
for key in list(cefExtensions.keys()):
value = str(cefExtensions[key])
cefList.append(f"{key}={value}")
cefExtension = " ".join(cefList)
cefString = (
"CEF:0|"
+ cefVendor
+ "|"
+ cefProduct
+ "|"
+ cefVersion
+ "|"
+ cefSignature
+ "|"
+ cefName
+ "|"
+ cefSeverity
+ "|"
+ cefExtension
)
return cefString
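# Rough shape of the result (illustrative, for a cowrie.login.success event):
#   CEF:0|Cowrie|Cowrie|1.0|cowrie.login.success|cowrie.login.success|5|
#   app=SSHv2 destinationServicename=sshd deviceExternalId=<sensor>
#   msg=<message> src=<src_ip> proto=tcp duser=<username> outcome=success
# (shown wrapped here; the function returns a single line).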
| 4,079 | 33.576271 | 75 | py |
cowrie | cowrie-master/src/cowrie/core/__init__.py | 0 | 0 | 0 | py |
|
cowrie | cowrie-master/src/cowrie/core/output.py | # Copyright (c) 2015 Michel Oosterhof <michel@oosterhof.net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import annotations
import abc
import re
import socket
import time
from os import environ
from typing import Any
from re import Pattern
from twisted.internet import reactor
from twisted.logger import formatTime
from cowrie.core.config import CowrieConfig
# Events:
# cowrie.client.fingerprint
# cowrie.client.size
# cowrie.client.var
# cowrie.client.version
# cowrie.command.input
# cowrie.command.failed
# cowrie.command.success (deprecated)
# cowrie.direct-tcpip.data
# cowrie.direct-tcpip.request
# cowrie.log.closed
# cowrie.login.failed
# cowrie.login.success
# cowrie.session.closed
# cowrie.session.connect
# cowrie.session.file_download
# cowrie.session.file_upload
# The time is available in two formats in each event, as key 'time'
# in epoch format and in key 'timestamp' as a ISO compliant string
# in UTC.
def convert(data):
"""
This converts a nested dictionary with bytes in it to string
"""
if isinstance(data, str):
return data
if isinstance(data, dict):
return {convert(key): convert(value) for key, value in list(data.items())}
if isinstance(data, list):
return [convert(element) for element in data]
if isinstance(data, bytes):
try:
string = data.decode("utf-8")
except UnicodeDecodeError:
string = repr(data)
return string
return data
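# Example (illustrative):
#   convert({b"user": b"root", "data": [b"\x80"]})
#   -> {"user": "root", "data": ["b'\\x80'"]}
# Bytes are decoded as UTF-8 where possible, otherwise replaced by their repr().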
class Output(metaclass=abc.ABCMeta):
"""
This is the abstract base class intended to be inherited by
cowrie output plugins. Plugins require the mandatory
methods: stop, start and write
"""
def __init__(self) -> None:
self.sessions: dict[str, str] = {}
self.ips: dict[str, str] = {}
# Need these for each individual transport, or else the session numbers overlap
self.sshRegex: Pattern[str] = re.compile(".*SSHTransport,([0-9]+),[0-9a-f:.]+$")
self.telnetRegex: Pattern[str] = re.compile(
".*TelnetTransport,([0-9]+),[0-9a-f:.]+$"
)
self.sensor: str = CowrieConfig.get(
"honeypot", "sensor_name", fallback=socket.gethostname()
)
self.timeFormat: str
# use Z for UTC (Zulu) time, it's shorter.
if "TZ" in environ and environ["TZ"] == "UTC":
self.timeFormat = "%Y-%m-%dT%H:%M:%S.%fZ"
else:
self.timeFormat = "%Y-%m-%dT%H:%M:%S.%f%z"
# Event trigger so that stop() is called by the reactor when stopping
reactor.addSystemEventTrigger("before", "shutdown", self.stop) # type: ignore
self.start()
def logDispatch(self, **kw: str) -> None:
"""
Use logDispatch when the HoneypotTransport prefix is not available.
Here you can explicitly set the sessionIds to tie the sessions together
"""
ev = kw
# ev["message"] = msg
self.emit(ev)
@abc.abstractmethod
def start(self) -> None:
"""
Abstract method to initialize output plugin
"""
pass
@abc.abstractmethod
def stop(self) -> None:
"""
Abstract method to shut down output plugin
"""
pass
@abc.abstractmethod
def write(self, event: dict[str, Any]) -> None:
"""
Handle a general event within the output plugin
"""
pass
def emit(self, event: dict) -> None:
"""
        This is the main emit() hook that gets called by the Twisted logging
To make this work with Cowrie, the event dictionary needs the following keys:
- 'eventid'
- 'sessionno' or 'session'
- 'message' or 'format'
"""
sessionno: str
ev: dict
# Ignore stdout and stderr in output plugins
if "printed" in event:
return
# Ignore anything without eventid
if "eventid" not in event:
return
# Ignore anything without session information
if (
"sessionno" not in event
and "session" not in event
and "system" not in event
):
return
# Ignore anything without message
if "message" not in event and "format" not in event:
return
        ev: dict[str, Any] = convert(event)  # type: ignore
ev["sensor"] = self.sensor
if "isError" in ev:
del ev["isError"]
# Add ISO timestamp and sensor data
if "time" not in ev:
ev["time"] = time.time()
ev["timestamp"] = formatTime(ev["time"], timeFormat=self.timeFormat)
if "format" in ev and ("message" not in ev or ev["message"] == ()):
try:
ev["message"] = ev["format"] % ev
del ev["format"]
except Exception:
pass
# Explicit sessionno (from logDispatch) overrides from 'system'
if "sessionno" in ev:
sessionno = ev["sessionno"]
del ev["sessionno"]
# Maybe it's passed explicitly
elif "session" in ev:
# reverse engineer sessionno
try:
sessionno = next(
key
for key, value in self.sessions.items()
if value == ev["session"]
)
except StopIteration:
return
# Extract session id from the twisted log prefix
elif "system" in ev:
sessionno = "0"
telnetmatch = self.telnetRegex.match(ev["system"])
if telnetmatch:
sessionno = f"T{telnetmatch.groups()[0]}"
else:
sshmatch = self.sshRegex.match(ev["system"])
if sshmatch:
sessionno = f"S{sshmatch.groups()[0]}"
if sessionno == "0":
return
if sessionno in self.ips:
ev["src_ip"] = self.ips[sessionno]
# Connection event is special. adds to session list
if ev["eventid"] == "cowrie.session.connect":
self.sessions[sessionno] = ev["session"]
self.ips[sessionno] = ev["src_ip"]
else:
ev["session"] = self.sessions[sessionno]
self.write(ev)
# Disconnect is special, remove cached data
if ev["eventid"] == "cowrie.session.closed":
del self.sessions[sessionno]
del self.ips[sessionno]
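    # A minimal event sketch (illustrative) that emit() accepts, e.g. via
    # logDispatch():
    #   {
    #       "eventid": "cowrie.session.connect",
    #       "session": "abcdef012345",
    #       "sessionno": "S1",
    #       "src_ip": "198.51.100.7",
    #       "message": "New connection",
    #   }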
| 7,999 | 31.653061 | 88 | py |
cowrie | cowrie-master/src/cowrie/core/ttylog.py | # -*- test-case-name: cowrie.test.utils -*-
# Copyright (c) 2009-2014 Upi Tamminen <desaster@gmail.com>
# See the COPYRIGHT file for more information
"""
Should be compatible with user mode linux
"""
from __future__ import annotations
import hashlib
import struct
OP_OPEN, OP_CLOSE, OP_WRITE, OP_EXEC = 1, 2, 3, 4
TYPE_INPUT, TYPE_OUTPUT, TYPE_INTERACT = 1, 2, 3
TTYSTRUCT = "<iLiiLL"
def ttylog_open(logfile: str, stamp: float) -> None:
"""
Initialize new tty log
@param logfile: logfile name
@param stamp: timestamp
"""
with open(logfile, "ab") as f:
sec, usec = int(stamp), int(1000000 * (stamp - int(stamp)))
f.write(struct.pack(TTYSTRUCT, OP_OPEN, 0, 0, 0, sec, usec))
def ttylog_write(
logfile: str, length: int, direction: int, stamp: float, data: bytes
) -> None:
"""
Write to tty log
@param logfile: timestamp
@param length: length
@param direction: 0 or 1
@param stamp: timestamp
@param data: data
"""
with open(logfile, "ab") as f:
sec, usec = int(stamp), int(1000000 * (stamp - int(stamp)))
f.write(struct.pack(TTYSTRUCT, OP_WRITE, 0, length, direction, sec, usec))
f.write(data)
def ttylog_close(logfile: str, stamp: float) -> None:
"""
Close tty log
@param logfile: logfile name
@param stamp: timestamp
"""
with open(logfile, "ab") as f:
sec, usec = int(stamp), int(1000000 * (stamp - int(stamp)))
f.write(struct.pack(TTYSTRUCT, OP_CLOSE, 0, 0, 0, sec, usec))
def ttylog_inputhash(logfile: str) -> str:
"""
Create unique hash of the input parts of tty log
@param logfile: logfile name
"""
ssize: int = struct.calcsize(TTYSTRUCT)
inputbytes: bytes = b""
with open(logfile, "rb") as fd:
while 1:
try:
op: int
_tty: int
length: int
direction: int
_sec: int
_usec: int
op, _tty, length, direction, _sec, _usec = struct.unpack(
TTYSTRUCT, fd.read(ssize)
)
data: bytes = fd.read(length)
except struct.error:
break
            if op == OP_WRITE and direction == TYPE_OUTPUT:
continue
inputbytes = inputbytes + data
shasum: str = hashlib.sha256(inputbytes).hexdigest()
return shasum
| 2,437 | 25.215054 | 82 | py |
cowrie | cowrie-master/src/cowrie/telnet_proxy/server_transport.py | # Copyright (C) 2015, 2016 GoSecure Inc.
"""
Telnet Transport and Authentication for the Honeypot
@author: Olivier Bilodeau <obilodeau@gosecure.ca>
"""
from __future__ import annotations
import time
import uuid
from twisted.conch.telnet import TelnetTransport
from twisted.internet import reactor
from twisted.internet.endpoints import TCP4ClientEndpoint
from twisted.protocols.policies import TimeoutMixin
from twisted.python import log
from cowrie.core.config import CowrieConfig
from cowrie.telnet_proxy import client_transport
from cowrie.telnet_proxy.handler import TelnetHandler
# object is added for Python 2.7 compatibility (#1198) - as is super with args
class FrontendTelnetTransport(TimeoutMixin, TelnetTransport):
def __init__(self):
super().__init__()
self.peer_ip = None
self.peer_port = 0
self.local_ip = None
self.local_port = 0
self.startTime = None
self.pool_interface = None
self.client = None
self.frontendAuthenticated = False
self.delayedPacketsToBackend = []
# this indicates whether the client effectively connected to the backend
# if they did we recycle the VM, else the VM can be considered "clean"
self.client_used_backend = False
# only used when simple proxy (no pool) set
self.backend_ip = None
self.backend_port = None
self.telnetHandler = TelnetHandler(self)
def connectionMade(self):
self.transportId = uuid.uuid4().hex[:12]
sessionno = self.transport.sessionno
self.peer_ip = self.transport.getPeer().host
self.peer_port = self.transport.getPeer().port + 1
self.local_ip = self.transport.getHost().host
self.local_port = self.transport.getHost().port
log.msg(
eventid="cowrie.session.connect",
format="New connection: %(src_ip)s:%(src_port)s (%(dst_ip)s:%(dst_port)s) [session: %(session)s]",
src_ip=self.transport.getPeer().host,
src_port=self.transport.getPeer().port,
dst_ip=self.transport.getHost().host,
dst_port=self.transport.getHost().port,
session=self.transportId,
sessionno=f"T{sessionno!s}",
protocol="telnet",
)
TelnetTransport.connectionMade(self)
# if we have a pool connect to it and later request a backend, else just connect to a simple backend
# when pool is set we can just test self.pool_interface to the same effect of getting the config
proxy_backend = CowrieConfig.get("proxy", "backend", fallback="simple")
if proxy_backend == "pool":
# request a backend
d = self.factory.pool_handler.request_interface()
d.addCallback(self.pool_connection_success)
d.addErrback(self.pool_connection_error)
else:
# simply a proxy, no pool
backend_ip = CowrieConfig.get("proxy", "backend_telnet_host")
backend_port = CowrieConfig.getint("proxy", "backend_telnet_port")
self.connect_to_backend(backend_ip, backend_port)
def pool_connection_error(self, reason):
log.msg(
f"Connection to backend pool refused: {reason.value}. Disconnecting frontend..."
)
self.transport.loseConnection()
def pool_connection_success(self, pool_interface):
log.msg("Connected to backend pool")
self.pool_interface = pool_interface
self.pool_interface.set_parent(self)
# now request a backend
self.pool_interface.send_vm_request(self.peer_ip)
def received_pool_data(self, operation, status, *data):
if operation == b"r":
honey_ip = data[0]
snapshot = data[1]
telnet_port = data[3]
log.msg(f"Got backend data from pool: {honey_ip.decode()}:{telnet_port}")
log.msg(f"Snapshot file: {snapshot.decode()}")
self.connect_to_backend(honey_ip, telnet_port)
def backend_connection_error(self, reason):
log.msg(
f"Connection to honeypot backend refused: {reason.value}. Disconnecting frontend..."
)
self.transport.loseConnection()
def backend_connection_success(self, backendTransport):
log.msg("Connected to honeypot backend")
self.startTime = time.time()
self.setTimeout(
CowrieConfig.getint("honeypot", "authentication_timeout", fallback=120)
)
def connect_to_backend(self, ip, port):
# connection to the backend starts here
client_factory = client_transport.BackendTelnetFactory()
client_factory.server = self
point = TCP4ClientEndpoint(reactor, ip, port, timeout=20)
d = point.connect(client_factory)
d.addCallback(self.backend_connection_success)
d.addErrback(self.backend_connection_error)
def dataReceived(self, data: bytes) -> None:
self.telnetHandler.addPacket("frontend", data)
def write(self, data):
self.transport.write(data)
def timeoutConnection(self):
"""
Make sure all sessions time out eventually.
Timeout is reset when authentication succeeds.
"""
log.msg("Timeout reached in FrontendTelnetTransport")
# close transports on both sides
if self.transport:
self.transport.loseConnection()
if self.client and self.client.transport:
self.client.transport.loseConnection()
# signal that we're closing to the handler
self.telnetHandler.close()
def connectionLost(self, reason):
"""
Fires on pre-authentication disconnects
"""
self.setTimeout(None)
TelnetTransport.connectionLost(self, reason)
# close transport on backend
if self.client and self.client.transport:
self.client.transport.loseConnection()
# signal that we're closing to the handler
self.telnetHandler.close()
if self.pool_interface:
# free VM from pool (VM was used if auth was performed successfully)
self.pool_interface.send_vm_free(self.telnetHandler.authDone)
# close transport connection to pool
self.pool_interface.transport.loseConnection()
if self.startTime is not None: # startTime is not set when auth fails
duration = time.time() - self.startTime
log.msg(
eventid="cowrie.session.closed",
format="Connection lost after %(duration)d seconds",
duration=duration,
)
def packet_buffer(self, payload):
"""
        We have to wait until the connection to the backend is ready. Meanwhile, we hold packets from the client
        to the server here.
"""
if not self.client.backendConnected:
# wait till backend connects to send packets to them
log.msg("Connection to backend not ready, buffering packet from frontend")
self.delayedPacketsToBackend.append(payload)
else:
if len(self.delayedPacketsToBackend) > 0:
self.delayedPacketsToBackend.append(payload)
else:
self.client.transport.write(payload)
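# Editor's illustrative note (not part of the original source): the backend used
# by this transport is chosen in connectionMade() from the [proxy] section. A
# cowrie.cfg fragment might look roughly like the following; the host and port
# values are placeholders, not defaults:
#
#   [proxy]
#   backend = simple               ; or "pool" to request a VM from the backend pool
#   backend_telnet_host = 192.0.2.10
#   backend_telnet_port = 23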
cowrie | cowrie-master/src/cowrie/telnet_proxy/client_transport.py | # Copyright (c) 2019 Guilherme Borges <guilhermerosasborges@gmail.com>
# All rights reserved.
from __future__ import annotations
from twisted.conch.telnet import TelnetTransport
from twisted.internet import protocol
from twisted.protocols.policies import TimeoutMixin
from twisted.python import log
class BackendTelnetTransport(TelnetTransport, TimeoutMixin):
def __init__(self):
# self.delayedPacketsToFrontend = []
self.backendConnected = False
self.telnetHandler = None
super().__init__()
def connectionMade(self):
log.msg(f"Connected to Telnet backend at {self.transport.getPeer().host}")
self.telnetHandler = self.factory.server.telnetHandler
self.telnetHandler.setClient(self)
self.backendConnected = True
self.factory.server.client = self
for packet in self.factory.server.delayedPacketsToBackend:
self.transport.write(packet)
self.factory.server.delayedPacketsToBackend = []
super(TelnetTransport, self).connectionMade()
# TODO timeout if no backend available
def connectionLost(self, reason):
# close transport on frontend
self.factory.server.loseConnection()
# signal that we're closing to the handler
self.telnetHandler.close()
def timeoutConnection(self):
"""
Make sure all sessions time out eventually.
Timeout is reset when authentication succeeds.
"""
log.msg("Timeout reached in BackendTelnetTransport")
# close transports on both sides
self.transport.loseConnection()
self.factory.server.transport.loseConnection()
# signal that we're closing to the handler
self.telnetHandler.close()
def dataReceived(self, data):
self.telnetHandler.addPacket("backend", data)
def write(self, data):
self.transport.write(data)
def packet_buffer(self, payload):
"""
We can only proceed if authentication has been performed between client and proxy.
Meanwhile we hold packets in here.
"""
self.factory.server.transport.write(payload)
class BackendTelnetFactory(protocol.ClientFactory):
protocol = BackendTelnetTransport
cowrie | cowrie-master/src/cowrie/telnet_proxy/__init__.py | 0 | 0 | 0 | py |
|
cowrie | cowrie-master/src/cowrie/telnet_proxy/handler.py | from __future__ import annotations
import os
import re
import time
from twisted.python import log
from cowrie.core import ttylog
from cowrie.core.checkers import HoneypotPasswordChecker
from cowrie.core.config import CowrieConfig
def process_backspaces(s: bytes) -> bytes:
"""
Takes a user-input string that might have backspaces in it (represented as 0x7F),
and actually performs the 'backspace operation' to return a clean string.
"""
n = b""
for i in range(len(s)):
char = chr(s[i]).encode()
if char == b"\x7f":
n = n[:-1]
else:
n += char
return n
def remove_all(original_string: bytes, remove_list: list[bytes]) -> bytes:
"""
Removes all substrings in the list remove_list from string original_string.
"""
n = original_string
for substring in remove_list:
n = n.replace(substring, b"")
return n
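# Editor-added examples (not in the original source) illustrating the two helpers
# above on small placeholder inputs:
#   process_backspaces(b"cat\x7fd")              -> b"cad"   (0x7F erases the "t")
#   remove_all(b"ro\rot\x00", [b"\r", b"\x00"])  -> b"root"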
class TelnetHandler:
def __init__(self, server):
# holds packet data; useful to manipulate it across functions as needed
self.currentData: bytes = b""
self.sendData = True
# front and backend references
self.server = server
self.client = None
# definitions from config
self.spoofAuthenticationData = CowrieConfig.getboolean(
"proxy", "telnet_spoof_authentication"
)
self.backendLogin = CowrieConfig.get("proxy", "backend_user").encode()
self.backendPassword = CowrieConfig.get("proxy", "backend_pass").encode()
self.usernameInNegotiationRegex = CowrieConfig.get(
"proxy", "telnet_username_in_negotiation_regex", raw=True
).encode()
self.usernamePromptRegex = CowrieConfig.get(
"proxy", "telnet_username_prompt_regex", raw=True
).encode()
self.passwordPromptRegex = CowrieConfig.get(
"proxy", "telnet_password_prompt_regex", raw=True
).encode()
# telnet state
self.currentCommand = b""
# auth state
self.authStarted = False
self.authDone = False
self.usernameState = b"" # TODO clear on end
self.inputingLogin = False
self.passwordState = b"" # TODO clear on end
self.inputingPassword = False
self.waitingLoginEcho = False
        # some data is sent by the backend right before the password prompt; we want to capture that
        # and the respective frontend response, and send it before starting to intercept auth data
self.prePasswordData = False
# buffer
self.backend_buffer = []
# tty logging
self.startTime = time.time()
self.ttylogPath = CowrieConfig.get("honeypot", "ttylog_path")
self.ttylogEnabled = CowrieConfig.getboolean(
"honeypot", "ttylog", fallback=True
)
self.ttylogSize = 0
if self.ttylogEnabled:
self.ttylogFile = "{}/telnet-{}.log".format(
self.ttylogPath, time.strftime("%Y%m%d-%H%M%S")
)
ttylog.ttylog_open(self.ttylogFile, self.startTime)
def setClient(self, client):
self.client = client
def close(self):
if self.ttylogEnabled:
ttylog.ttylog_close(self.ttylogFile, time.time())
shasum = ttylog.ttylog_inputhash(self.ttylogFile)
shasumfile = os.path.join(self.ttylogPath, shasum)
if os.path.exists(shasumfile):
duplicate = True
os.remove(self.ttylogFile)
else:
duplicate = False
os.rename(self.ttylogFile, shasumfile)
umask = os.umask(0)
os.umask(umask)
os.chmod(shasumfile, 0o666 & ~umask)
self.ttylogEnabled = (
False # do not close again if function called after closing
)
log.msg(
eventid="cowrie.log.closed",
format="Closing TTY Log: %(ttylog)s after %(duration)d seconds",
ttylog=shasumfile,
size=self.ttylogSize,
shasum=shasum,
duplicate=duplicate,
duration=time.time() - self.startTime,
)
def sendBackend(self, data: bytes) -> None:
self.backend_buffer.append(data)
if not self.client:
return
for packet in self.backend_buffer:
self.client.transport.write(packet)
# log raw packets if user sets so
if CowrieConfig.getboolean("proxy", "log_raw", fallback=False):
log.msg("to_backend - " + data.decode("unicode-escape"))
if self.ttylogEnabled and self.authStarted:
cleanData = data.replace(
b"\x00", b"\n"
            )  # some frontends send NUL (0x00) instead of newline
ttylog.ttylog_write(
self.ttylogFile,
len(cleanData),
ttylog.TYPE_INPUT,
time.time(),
cleanData,
)
self.ttylogSize += len(cleanData)
self.backend_buffer = self.backend_buffer[1:]
def sendFrontend(self, data: bytes) -> None:
self.server.transport.write(data)
# log raw packets if user sets so
if CowrieConfig.getboolean("proxy", "log_raw", fallback=False):
log.msg("to_frontend - " + data.decode("unicode-escape"))
if self.ttylogEnabled and self.authStarted:
ttylog.ttylog_write(
self.ttylogFile, len(data), ttylog.TYPE_OUTPUT, time.time(), data
)
# self.ttylogSize += len(data)
def addPacket(self, parent: str, data: bytes) -> None:
self.currentData = data
self.sendData = True
if self.spoofAuthenticationData and not self.authDone:
# detect prompts from backend
if parent == "backend":
self.setProcessingStateBackend()
# detect patterns from frontend
if parent == "frontend":
self.setProcessingStateFrontend()
# save user inputs from frontend
if parent == "frontend":
if self.inputingPassword:
self.processPasswordInput()
if self.inputingLogin:
self.processUsernameInput()
# capture username echo from backend
if self.waitingLoginEcho and parent == "backend":
self.currentData = self.currentData.replace(
self.backendLogin + b"\r\n", b""
)
self.waitingLoginEcho = False
# log user commands
if parent == "frontend" and self.authDone:
self.currentCommand += data.replace(b"\r\x00", b"").replace(b"\r\n", b"")
# check if a command has terminated
if b"\r" in data:
if len(self.currentCommand) > 0:
log.msg(
eventid="cowrie.command.input",
input=self.currentCommand,
format="CMD: %(input)s",
)
self.currentCommand = b""
# send data after processing (also check if processing did not reduce it to an empty string)
if self.sendData and len(self.currentData):
if parent == "frontend":
self.sendBackend(self.currentData)
else:
self.sendFrontend(self.currentData)
def processUsernameInput(self) -> None:
        self.sendData = False  # withhold data until input is complete
# remove control characters
control_chars = [b"\r", b"\x00", b"\n"]
self.usernameState += remove_all(self.currentData, control_chars)
# backend echoes data back to user to show on terminal prompt
# - NULL char is replaced by NEWLINE by backend
# - 0x7F (backspace) is replaced by two 0x08 separated by a blankspace
self.sendFrontend(
self.currentData.replace(b"\x7f", b"\x08 \x08").replace(b"\x00", b"\n")
)
# check if done inputing
if b"\r" in self.currentData:
terminatingChar = chr(
self.currentData[self.currentData.index(b"\r") + 1]
).encode() # usually \n or \x00
# cleanup
self.usernameState = process_backspaces(self.usernameState)
log.msg(f"User input login: {self.usernameState.decode('unicode-escape')}")
self.inputingLogin = False
# actually send to backend
self.currentData = self.backendLogin + b"\r" + terminatingChar
self.sendData = True
# we now have to ignore the username echo from the backend in the next packet
self.waitingLoginEcho = True
def processPasswordInput(self) -> None:
        self.sendData = False  # withhold data until input is complete
if self.prePasswordData:
self.sendBackend(self.currentData[:3])
self.prePasswordData = False
# remove control characters
control_chars = [b"\xff", b"\xfd", b"\x01", b"\r", b"\x00", b"\n"]
self.passwordState += remove_all(self.currentData, control_chars)
# check if done inputing
if b"\r" in self.currentData:
terminatingChar = chr(
self.currentData[self.currentData.index(b"\r") + 1]
).encode() # usually \n or \x00
# cleanup
self.passwordState = process_backspaces(self.passwordState)
log.msg(
f"User input password: {self.passwordState.decode('unicode-escape')}"
)
self.inputingPassword = False
            # having the password (and the username, either empty or set before), we can check the login
            # against the database; if valid, authenticate, otherwise send a fake password to get
            # the login failed prompt
src_ip = self.server.transport.getPeer().host
if HoneypotPasswordChecker().checkUserPass(
self.usernameState, self.passwordState, src_ip
):
passwordToSend = self.backendPassword
self.authDone = True
self.server.setTimeout(
CowrieConfig.getint("honeypot", "interactive_timeout", fallback=300)
)
else:
log.msg("Sending invalid auth to backend")
passwordToSend = self.backendPassword + b"fake"
# actually send to backend
self.currentData = passwordToSend + b"\r" + terminatingChar
self.sendData = True
def setProcessingStateBackend(self) -> None:
"""
This function analyses a data packet and sets the processing state of the handler accordingly.
It looks for authentication phases (password input and username input), as well as data that
may need to be processed specially.
"""
hasPassword = re.search(self.passwordPromptRegex, self.currentData)
if hasPassword:
log.msg("Password prompt from backend")
self.authStarted = True
self.inputingPassword = True
self.passwordState = b""
hasLogin = re.search(self.usernamePromptRegex, self.currentData)
if hasLogin:
log.msg("Login prompt from backend")
self.authStarted = True
self.inputingLogin = True
self.usernameState = b""
self.prePasswordData = b"\xff\xfb\x01" in self.currentData
def setProcessingStateFrontend(self) -> None:
"""
Same for the frontend.
"""
# login username is sent in channel negotiation to match the client's username
negotiationLoginPattern = re.compile(self.usernameInNegotiationRegex)
hasNegotiationLogin = negotiationLoginPattern.search(self.currentData)
if hasNegotiationLogin:
self.usernameState = hasNegotiationLogin.group(2)
log.msg(
f"Detected username {self.usernameState.decode('unicode-escape')} in negotiation, spoofing for backend..."
)
# spoof username in data sent
# username is always sent correct, password is the one sent wrong if we don't want to authenticate
self.currentData = negotiationLoginPattern.sub(
rb"\1" + self.backendLogin + rb"\3", self.currentData
)
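    # Editor's illustrative summary (not part of the original class): with
    # telnet_spoof_authentication enabled, a session roughly proceeds as
    #   1. the backend's login/password prompts (matched by the configured
    #      *_prompt_regex options) switch the handler into inputingLogin /
    #      inputingPassword;
    #   2. the attacker's keystrokes are accumulated, cleaned of control
    #      characters and backspaces, and logged;
    #   3. the credentials actually forwarded to the backend are backend_user /
    #      backend_pass from the [proxy] section, with a deliberately wrong
    #      password whenever HoneypotPasswordChecker rejects the attempt, so the
    #      backend produces a normal login failure.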
cowrie | cowrie-master/src/cowrie/python/__init__.py | 0 | 0 | 0 | py |
|
cowrie | cowrie-master/src/cowrie/python/logfile.py | # -*- test-case-name: cowrie.test.utils -*-
# Copyright (c) 2017 Michel Oosterhof <michel@oosterhof.net>
# See the COPYRIGHT file for more information
from __future__ import annotations
from os import environ
from twisted.logger import textFileLogObserver
from twisted.python import logfile
from cowrie.core.config import CowrieConfig
class CowrieDailyLogFile(logfile.DailyLogFile):
"""
Overload original Twisted with improved date formatting
"""
def suffix(self, tupledate):
"""
Return the suffix given a (year, month, day) tuple or unixtime
"""
try:
return "{:02d}-{:02d}-{:02d}".format(
tupledate[0], tupledate[1], tupledate[2]
)
except Exception:
# try taking a float unixtime
return "_".join(map(str, self.toDate(tupledate)))
def logger():
directory = CowrieConfig.get("honeypot", "log_path", fallback="var/log/cowrie")
logfile = CowrieDailyLogFile("cowrie.log", directory)
# use Z for UTC (Zulu) time, it's shorter.
if "TZ" in environ and environ["TZ"] == "UTC":
timeFormat = "%Y-%m-%dT%H:%M:%S.%fZ"
else:
timeFormat = "%Y-%m-%dT%H:%M:%S.%f%z"
return textFileLogObserver(logfile, timeFormat=timeFormat)
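# Editor-added example (not in the original source): suffix((2024, 3, 7)) yields
# "2024-03-07", and logger() only uses the shorter "Z" (Zulu) time format when
# the process runs with TZ=UTC; the date here is a placeholder.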
cowrie | cowrie-master/src/cowrie/telnet/userauth.py | # Copyright (C) 2015, 2016 GoSecure Inc.
"""
Telnet Transport and Authentication for the Honeypot
@author: Olivier Bilodeau <obilodeau@gosecure.ca>
"""
from __future__ import annotations
import struct
from twisted.conch.telnet import (
ECHO,
LINEMODE,
NAWS,
SGA,
AuthenticatingTelnetProtocol,
ITelnetProtocol,
)
from twisted.python import log
from cowrie.core.config import CowrieConfig
from cowrie.core.credentials import UsernamePasswordIP
class HoneyPotTelnetAuthProtocol(AuthenticatingTelnetProtocol):
"""
TelnetAuthProtocol that takes care of Authentication. Once authenticated this
protocol is replaced with HoneyPotTelnetSession.
"""
loginPrompt = b"login: "
passwordPrompt = b"Password: "
windowSize = [40, 80]
def connectionMade(self):
# self.transport.negotiationMap[NAWS] = self.telnet_NAWS
        # Initial option negotiation. Want something at least for Mirai
# for opt in (NAWS,):
# self.transport.doChain(opt).addErrback(log.err)
# I need to doubly escape here since my underlying
# CowrieTelnetTransport hack would remove it and leave just \n
self.transport.write(self.factory.banner.replace(b"\n", b"\r\r\n"))
self.transport.write(self.loginPrompt)
def connectionLost(self, reason):
"""
Fires on pre-authentication disconnects
"""
AuthenticatingTelnetProtocol.connectionLost(self, reason)
def telnet_User(self, line):
"""
Overridden to conditionally kill 'WILL ECHO' which confuses clients
that don't implement a proper Telnet protocol (most malware)
"""
self.username = line # .decode()
# only send ECHO option if we are chatting with a real Telnet client
self.transport.willChain(ECHO)
# FIXME: this should be configurable or provided via filesystem
self.transport.write(self.passwordPrompt)
return "Password"
def telnet_Password(self, line):
username, password = self.username, line # .decode()
del self.username
def login(ignored):
self.src_ip = self.transport.getPeer().host
creds = UsernamePasswordIP(username, password, self.src_ip)
d = self.portal.login(creds, self.src_ip, ITelnetProtocol)
d.addCallback(self._cbLogin)
d.addErrback(self._ebLogin)
# are we dealing with a real Telnet client?
if self.transport.options:
# stop ECHO
# even if ECHO negotiation fails we still want to attempt a login
# this allows us to support dumb clients which is common in malware
# thus the addBoth: on success and on exception (AlreadyNegotiating)
self.transport.wontChain(ECHO).addBoth(login)
else:
# process login
login("")
return "Discard"
def telnet_Command(self, command):
self.transport.protocol.dataReceived(command + b"\r")
return "Command"
def _cbLogin(self, ial):
"""
Fired on a successful login
"""
interface, protocol, logout = ial
protocol.windowSize = self.windowSize
self.protocol = protocol
self.logout = logout
self.state = "Command"
self.transport.write(b"\n")
# Remove the short timeout of the login prompt.
self.transport.setTimeout(
CowrieConfig.getint("honeypot", "interactive_timeout", fallback=300)
)
# replace myself with avatar protocol
protocol.makeConnection(self.transport)
self.transport.protocol = protocol
def _ebLogin(self, failure):
# TODO: provide a way to have user configurable strings for wrong password
self.transport.wontChain(ECHO)
self.transport.write(b"\nLogin incorrect\n")
self.transport.write(self.loginPrompt)
self.state = "User"
def telnet_NAWS(self, data):
"""
From TelnetBootstrapProtocol in twisted/conch/telnet.py
"""
if len(data) == 4:
width, height = struct.unpack("!HH", b"".join(data))
self.windowSize = [height, width]
else:
log.msg("Wrong number of NAWS bytes")
def enableLocal(self, opt):
if opt == ECHO:
return True
# TODO: check if twisted now supports SGA (see git commit c58056b0)
elif opt == SGA:
return False
else:
return False
def enableRemote(self, opt):
# TODO: check if twisted now supports LINEMODE (see git commit c58056b0)
if opt == LINEMODE:
return False
elif opt == NAWS:
return True
elif opt == SGA:
return True
else:
return False
cowrie | cowrie-master/src/cowrie/telnet/factory.py | # Copyright (C) 2015, 2016 GoSecure Inc.
"""
Telnet Transport and Authentication for the Honeypot
@author: Olivier Bilodeau <obilodeau@gosecure.ca>
"""
from __future__ import annotations
import time
from twisted.cred import portal as tp
from twisted.internet import protocol
from twisted.plugin import IPlugin
from twisted.python import log
from cowrie.core.config import CowrieConfig
from cowrie.telnet.transport import CowrieTelnetTransport
from cowrie.telnet.userauth import HoneyPotTelnetAuthProtocol
from cowrie.telnet_proxy.server_transport import FrontendTelnetTransport
class HoneyPotTelnetFactory(protocol.ServerFactory):
"""
This factory creates HoneyPotTelnetAuthProtocol instances
They listen directly to the TCP port
"""
tac: IPlugin
portal: tp.Portal | None = None # gets set by Twisted plugin
banner: bytes
starttime: float
def __init__(self, backend, pool_handler):
self.backend: str = backend
self.pool_handler = pool_handler
super().__init__()
# TODO logging clarity can be improved: see what SSH does
def logDispatch(self, **args):
"""
Special delivery to the loggers to avoid scope problems
"""
args["sessionno"] = "T{}".format(str(args["sessionno"]))
for output in self.tac.output_plugins:
output.logDispatch(**args)
def startFactory(self):
try:
honeyfs = CowrieConfig.get("honeypot", "contents_path")
issuefile = honeyfs + "/etc/issue.net"
with open(issuefile, "rb") as banner:
self.banner = banner.read()
except OSError:
self.banner = b""
# For use by the uptime command
self.starttime = time.time()
# hook protocol
if self.backend == "proxy":
self.protocol = lambda: FrontendTelnetTransport()
else:
self.protocol = lambda: CowrieTelnetTransport(
HoneyPotTelnetAuthProtocol, self.portal
)
protocol.ServerFactory.startFactory(self)
log.msg("Ready to accept Telnet connections")
def stopFactory(self) -> None:
"""
Stop output plugins
"""
protocol.ServerFactory.stopFactory(self)
def buildProtocol(self, addr):
"""
        Overridden so we can keep a reference to running protocols (which is used for testing)
"""
p = self.protocol()
p.factory = self
return p
cowrie | cowrie-master/src/cowrie/telnet/session.py | # Copyright (C) 2015, 2016 GoSecure Inc.
"""
Telnet User Session management for the Honeypot
@author: Olivier Bilodeau <obilodeau@gosecure.ca>
"""
from __future__ import annotations
import traceback
from zope.interface import implementer
from twisted.conch.ssh import session
from twisted.conch.telnet import ECHO, SGA, TelnetBootstrapProtocol
from twisted.internet import interfaces, protocol
from twisted.python import log
from cowrie.insults import insults
from cowrie.shell import protocol as cproto
from cowrie.shell import pwd
class HoneyPotTelnetSession(TelnetBootstrapProtocol):
id = 0 # telnet can only have 1 simultaneous session, unlike SSH
windowSize = [40, 80]
# to be populated by HoneyPotTelnetAuthProtocol after auth
transportId = None
def __init__(self, username, server):
self.username = username.decode()
self.server = server
try:
pwentry = pwd.Passwd().getpwnam(self.username)
self.uid = pwentry["pw_uid"]
self.gid = pwentry["pw_gid"]
self.home = pwentry["pw_dir"]
except KeyError:
self.uid = 1001
self.gid = 1001
self.home = "/home"
self.environ = {
"LOGNAME": self.username,
"USER": self.username,
"SHELL": "/bin/bash",
"HOME": self.home,
"TMOUT": "1800",
}
if self.uid == 0:
self.environ[
"PATH"
] = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
else:
self.environ[
"PATH"
] = "/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games"
# required because HoneyPotBaseProtocol relies on avatar.avatar.home
self.avatar = self
# Do the delayed file system initialization
self.server.initFileSystem(self.home)
def connectionMade(self):
processprotocol = TelnetSessionProcessProtocol(self)
# If we are dealing with a proper Telnet client: enable server echo
if self.transport.options:
self.transport.willChain(SGA)
self.transport.willChain(ECHO)
self.protocol = insults.LoggingTelnetServerProtocol(
cproto.HoneyPotInteractiveTelnetProtocol, self
)
# somewhere in Twisted this exception gets lost. Log explicitly here
try:
self.protocol.makeConnection(processprotocol)
processprotocol.makeConnection(session.wrapProtocol(self.protocol))
except Exception:
log.msg(traceback.format_exc())
def connectionLost(self, reason):
TelnetBootstrapProtocol.connectionLost(self, reason)
self.server = None
self.avatar = None
self.protocol = None
def logout(self):
log.msg(f"avatar {self.username} logging out")
# Taken and adapted from
# https://github.com/twisted/twisted/blob/26ad16ab41db5f0f6d2526a891e81bbd3e260247/twisted/conch/ssh/session.py#L186
@implementer(interfaces.ITransport)
class TelnetSessionProcessProtocol(protocol.ProcessProtocol):
"""
I am both an L{IProcessProtocol} and an L{ITransport}.
I am a transport to the remote endpoint and a process protocol to the
local subsystem.
"""
def __init__(self, sess):
self.session = sess
self.lostOutOrErrFlag = False
def outReceived(self, data: bytes) -> None:
self.session.write(data)
def errReceived(self, data: bytes) -> None:
log.msg(f"Error received: {data.decode()}")
# EXTENDED_DATA_STDERR is from ssh, no equivalent in telnet?
# self.session.writeExtended(connection.EXTENDED_DATA_STDERR, err)
def outConnectionLost(self) -> None:
"""
EOF should only be sent when both STDOUT and STDERR have been closed.
"""
if self.lostOutOrErrFlag:
self.session.conn.sendEOF(self.session)
else:
self.lostOutOrErrFlag = True
def errConnectionLost(self) -> None:
"""
See outConnectionLost().
"""
self.outConnectionLost()
def connectionLost(self, reason=None):
self.session.loseConnection()
self.session = None
def processEnded(self, reason=None):
"""
        Here SSH does signal handling; I don't think telnet supports that, so
        simply bail out.
"""
log.msg(f"Process ended. Telnet Session disconnected: {reason}")
self.session.loseConnection()
def getHost(self):
"""
Return the host from my session's transport.
"""
return self.session.transport.getHost()
def getPeer(self):
"""
Return the peer from my session's transport.
"""
return self.session.transport.getPeer()
def write(self, data):
self.session.write(data)
def writeSequence(self, seq):
self.session.write(b"".join(seq))
def loseConnection(self):
self.session.loseConnection()
cowrie | cowrie-master/src/cowrie/telnet/__init__.py | 0 | 0 | 0 | py |
|
cowrie | cowrie-master/src/cowrie/telnet/transport.py | # Copyright (C) 2015, 2016 GoSecure Inc.
"""
Telnet Transport and Authentication for the Honeypot
@author: Olivier Bilodeau <obilodeau@gosecure.ca>
"""
from __future__ import annotations
import time
import uuid
from twisted.conch.telnet import AlreadyNegotiating, TelnetTransport
from twisted.protocols.policies import TimeoutMixin
from twisted.python import log
from cowrie.core.config import CowrieConfig
class CowrieTelnetTransport(TelnetTransport, TimeoutMixin):
"""
CowrieTelnetTransport
"""
def connectionMade(self):
self.transportId: str = uuid.uuid4().hex[:12]
sessionno = self.transport.sessionno
self.startTime = time.time()
self.setTimeout(
CowrieConfig.getint("honeypot", "authentication_timeout", fallback=120)
)
log.msg(
eventid="cowrie.session.connect",
format="New connection: %(src_ip)s:%(src_port)s (%(dst_ip)s:%(dst_port)s) [session: %(session)s]",
src_ip=self.transport.getPeer().host,
src_port=self.transport.getPeer().port,
dst_ip=self.transport.getHost().host,
dst_port=self.transport.getHost().port,
session=self.transportId,
sessionno=f"T{sessionno!s}",
protocol="telnet",
)
TelnetTransport.connectionMade(self)
def write(self, data):
"""
Because of the presence of two ProtocolTransportMixin in the protocol
        stack once authenticated, I need to override write() and remove a \r,
        otherwise we end up with \r\r\n on the wire.
It is kind of a hack. I asked for a better solution here:
http://stackoverflow.com/questions/35087250/twisted-telnet-server-how-to-avoid-nested-crlf
"""
self.transport.write(data.replace(b"\r\n", b"\n"))
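        # Editor-added example (not in the original source): a payload such as
        # b"Login incorrect\r\n" is passed down as b"Login incorrect\n", so the
        # lower ProtocolTransportMixin does not produce \r\r\n on the wire.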
def timeoutConnection(self):
"""
Make sure all sessions time out eventually.
Timeout is reset when authentication succeeds.
"""
log.msg("Timeout reached in CowrieTelnetTransport")
self.transport.loseConnection()
def connectionLost(self, reason):
"""
Fires on pre-authentication disconnects
"""
self.setTimeout(None)
TelnetTransport.connectionLost(self, reason)
duration = time.time() - self.startTime
log.msg(
eventid="cowrie.session.closed",
format="Connection lost after %(duration)d seconds",
duration=duration,
)
def willChain(self, option):
return self._chainNegotiation(None, self.will, option)
def wontChain(self, option):
return self._chainNegotiation(None, self.wont, option)
def doChain(self, option):
return self._chainNegotiation(None, self.do, option)
def dontChain(self, option):
return self._chainNegotiation(None, self.dont, option)
def _handleNegotiationError(self, f, func, option):
if f.type is AlreadyNegotiating:
s = self.getOptionState(option)
if func in (self.do, self.dont):
s.him.onResult.addCallback(self._chainNegotiation, func, option)
s.him.onResult.addErrback(self._handleNegotiationError, func, option)
if func in (self.will, self.wont):
s.us.onResult.addCallback(self._chainNegotiation, func, option)
s.us.onResult.addErrback(self._handleNegotiationError, func, option)
# We only care about AlreadyNegotiating, everything else can be ignored
# Possible other types include OptionRefused, AlreadyDisabled, AlreadyEnabled, ConnectionDone, ConnectionLost
elif f.type is AssertionError:
log.msg(
"Client tried to illegally refuse to disable an option; ignoring, but undefined behavior may result"
)
# TODO: Is ignoring this violation of the protocol the proper behavior?
# Should the connection be terminated instead?
# The telnetd package on Ubuntu (netkit-telnet) does all negotiation before sending the login prompt,
# but does handle client-initiated negotiation at any time.
def _chainNegotiation(self, res, func, option):
return func(option).addErrback(self._handleNegotiationError, func, option)
cowrie | cowrie-master/src/cowrie/test/test_tftp.py | # Copyright (c) 2018 Michel Oosterhof
# See LICENSE for details.
from __future__ import annotations
import os
import unittest
from cowrie.shell.protocol import HoneyPotInteractiveProtocol
from cowrie.test.fake_server import FakeAvatar, FakeServer
from cowrie.test.fake_transport import FakeTransport
os.environ["COWRIE_HONEYPOT_DATA_PATH"] = "data"
os.environ["COWRIE_HONEYPOT_DOWNLOAD_PATH"] = "/tmp"
os.environ["COWRIE_SHELL_FILESYSTEM"] = "share/cowrie/fs.pickle"
PROMPT = b"root@unitTest:~# "
class ShellTftpCommandTests(unittest.TestCase):
"""Tests for cowrie/commands/tftp.py."""
def setUp(self) -> None:
self.proto = HoneyPotInteractiveProtocol(FakeAvatar(FakeServer()))
self.tr = FakeTransport("", "31337")
self.proto.makeConnection(self.tr)
self.tr.clear()
def tearDown(self) -> None:
self.proto.connectionLost("tearDown From Unit Test")
def test_echo_command_001(self) -> None:
self.proto.lineReceived(b"tftp\n")
self.assertEqual(
self.tr.value(),
b"usage: tftp [-h] [-c C C] [-l L] [-g G] [-p P] [-r R] [hostname]\n"
+ PROMPT,
)
cowrie | cowrie-master/src/cowrie/test/fake_transport.py | # Copyright (c) 2016 Dave Germiquet
# See LICENSE for details.
from __future__ import annotations
from collections.abc import Callable
from twisted.conch.insults import insults
from twisted.test import proto_helpers
class Container:
"""This class is placeholder for creating a fake interface.
@var host Client fake information
@var port Fake Port for connection
@var otherVersionString version
"""
otherVersionString = "1.0"
transportId = "test-suite"
id = "test-suite"
sessionno = 1
starttime = 0
session: Container | None
sessions: dict[int, str] = {}
conn: Container | None
transport: Container | None
factory: Container | None
def getPeer(self):
"""Fake function for mockup."""
self.host = "1.1.1.1"
self.port = 2222
return self
def processEnded(self, reason):
"""Fake function for mockup."""
pass
class FakeTransport(proto_helpers.StringTransport):
"""Fake transport with abortConnection() method."""
# Thanks to TerminalBuffer (some code was taken from twisted Terminal Buffer)
redirFiles: set[list[str]] = set()
width = 80
height = 24
void = object()
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, N_COLORS = list(range(9))
for keyID in (
"UP_ARROW",
"DOWN_ARROW",
"RIGHT_ARROW",
"LEFT_ARROW",
"HOME",
"INSERT",
"DELETE",
"END",
"PGUP",
"PGDN",
"F1",
"F2",
"F3",
"F4",
"F5",
"F6",
"F7",
"F8",
"F9",
"F10",
"F11",
"F12",
):
exec(f"{keyID} = object()")
TAB = "\x09"
BACKSPACE = "\x08"
modes: dict[str, Callable] = {}
# '\x01': self.handle_HOME, # CTRL-A
# '\x02': self.handle_LEFT, # CTRL-B
# '\x03': self.handle_CTRL_C, # CTRL-C
# '\x04': self.handle_CTRL_D, # CTRL-D
# '\x05': self.handle_END, # CTRL-E
# '\x06': self.handle_RIGHT, # CTRL-F
# '\x08': self.handle_BACKSPACE, # CTRL-H
# '\x09': self.handle_TAB,
# '\x0B': self.handle_CTRL_K, # CTRL-K
# '\x0C': self.handle_CTRL_L, # CTRL-L
# '\x0E': self.handle_DOWN, # CTRL-N
# '\x10': self.handle_UP, # CTRL-P
# '\x15': self.handle_CTRL_U, # CTRL-U
def setModes(self, modes):
for m in modes:
self.modes[m] = True
aborting = False
transport = Container()
transport.session = Container()
transport.session.conn = Container()
transport.session.conn.transport = Container()
transport.session.conn.transport.transport = Container()
transport.session.conn.transport.transport.sessionno = 1
transport.session.conn.transport.factory = Container()
transport.session.conn.transport.factory.sessions = {}
transport.session.conn.transport.factory.starttime = 0
factory = Container()
session: dict[str, str] = {}
def abortConnection(self):
self.aborting = True
def resetModes(self, modes):
for m in modes:
try:
del self.modes[m]
except KeyError:
pass
def setPrivateModes(self, modes):
"""Enable the given modes.
Track which modes have been enabled so that the implementations of
other L{insults.ITerminalTransport} methods can be properly implemented
to respect these settings.
@see: L{resetPrivateModes}
@see: L{insults.ITerminalTransport.setPrivateModes}
"""
for m in modes:
self.privateModes[m] = True
def reset(self):
self.home = insults.Vector(0, 0)
self.x = self.y = 0
self.modes = {}
self.privateModes = {}
self.setPrivateModes(
[insults.privateModes.AUTO_WRAP, insults.privateModes.CURSOR_MODE]
)
self.numericKeypad = "app"
self.activeCharset = insults.G0
self.graphicRendition = {
"bold": False,
"underline": False,
"blink": False,
"reverseVideo": False,
"foreground": self.WHITE,
"background": self.BLACK,
}
self.charsets = {
insults.G0: insults.CS_US,
insults.G1: insults.CS_US,
insults.G2: insults.CS_ALTERNATE,
insults.G3: insults.CS_ALTERNATE_SPECIAL,
}
self.eraseDisplay()
def eraseDisplay(self):
self.lines = [self._emptyLine(self.width) for i in range(self.height)]
def _currentFormattingState(self):
return True
def _FormattingState(self):
return True
def _emptyLine(self, width):
return [(self.void, self._currentFormattingState()) for i in range(width)]
cowrie | cowrie-master/src/cowrie/test/fake_server.py | # Copyright (c) 2016 Dave Germiquet
# See LICENSE for details.
from __future__ import annotations
from cowrie.shell import fs
class FakeServer:
"""FakeServer class.
    @ivar hostname Server's host name
@ivar fs File System for cowrie to use
"""
def __init__(self):
self.arch = "linux-x64-lsb"
self.hostname = "unitTest"
self.fs = fs.HoneyPotFilesystem("arch", "/root")
self.process = None
class FakeAvatar:
"""FakeAvatar class.
@var avatar itself
@ivar server server configuration
@var fs File System for cowrie to use
@var environ for user
@var uid for user
"""
def __init__(self, server):
self.avatar = self
self.server = server
self.uid = 0
self.gid = 0
self.home = "/root"
self.username = "root"
self.environ = {
"LOGNAME": self.username,
"USER": self.username,
"HOME": self.home,
"TMOUT": "1800",
}
if self.uid == 0:
self.environ[
"PATH"
] = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
else:
self.environ[
"PATH"
] = "/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games"
self.windowSize = [25, 80]
cowrie | cowrie-master/src/cowrie/test/test_uniq.py | # Copyright (c) 2020 Peter Sufliarsky
# See LICENSE for details.
from __future__ import annotations
import os
import unittest
from cowrie.shell.protocol import HoneyPotInteractiveProtocol
from cowrie.test.fake_server import FakeAvatar, FakeServer
from cowrie.test.fake_transport import FakeTransport
os.environ["COWRIE_HONEYPOT_DATA_PATH"] = "data"
os.environ["COWRIE_HONEYPOT_DOWNLOAD_PATH"] = "/tmp"
os.environ["COWRIE_SHELL_FILESYSTEM"] = "share/cowrie/fs.pickle"
PROMPT = b"root@unitTest:~# "
class ShellUniqCommandTests(unittest.TestCase):
"""Tests for cowrie/commands/uniq.py."""
proto = HoneyPotInteractiveProtocol(FakeAvatar(FakeServer()))
tr = FakeTransport("", "31337")
@classmethod
def setUpClass(cls) -> None:
cls.proto.makeConnection(cls.tr)
@classmethod
def tearDownClass(cls) -> None:
cls.proto.connectionLost("tearDown From Unit Test")
def setUp(self) -> None:
self.tr.clear()
def test_uniq_command_001(self) -> None:
self.proto.lineReceived(b"echo test | uniq\n")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_uniq_command_002(self) -> None:
self.proto.lineReceived(b'echo -e "test\ntest\ntest" | uniq\n')
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_uniq_command_003(self) -> None:
self.proto.lineReceived(b"uniq\n")
self.proto.lineReceived(b"test\n")
self.proto.lineReceived(b"test\n")
self.proto.lineReceived(b"test\n")
self.proto.handle_CTRL_D()
self.assertEqual(self.tr.value(), b"test\n\n" + PROMPT)
cowrie | cowrie-master/src/cowrie/test/test_cat.py | # Copyright (c) 2018 Michel Oosterhof
# See LICENSE for details.
from __future__ import annotations
import os
import unittest
from cowrie.shell.protocol import HoneyPotInteractiveProtocol
from cowrie.test.fake_server import FakeAvatar, FakeServer
from cowrie.test.fake_transport import FakeTransport
os.environ["COWRIE_HONEYPOT_DATA_PATH"] = "data"
os.environ["COWRIE_HONEYPOT_DOWNLOAD_PATH"] = "/tmp"
os.environ["COWRIE_SHELL_FILESYSTEM"] = "share/cowrie/fs.pickle"
PROMPT = b"root@unitTest:~# "
class ShellCatCommandTests(unittest.TestCase):
"""Test for cowrie/commands/cat.py."""
def setUp(self) -> None:
self.proto = HoneyPotInteractiveProtocol(FakeAvatar(FakeServer()))
self.tr = FakeTransport("", "31337")
self.proto.makeConnection(self.tr)
self.tr.clear()
def tearDown(self) -> None:
self.proto.connectionLost("tearDown From Unit Test")
def test_cat_command_001(self) -> None:
self.proto.lineReceived(b"cat nonExisting\n")
self.assertEqual(
self.tr.value(), b"cat: nonExisting: No such file or directory\n" + PROMPT
)
def test_cat_command_002(self) -> None:
self.proto.lineReceived(b"echo test | cat -\n")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_cat_command_003(self) -> None:
self.proto.lineReceived(b"echo 1 | cat\n")
self.proto.lineReceived(b"echo 2\n")
self.proto.handle_CTRL_D()
self.assertEqual(self.tr.value(), b"1\n" + PROMPT + b"2\n" + PROMPT)
def test_cat_command_004(self) -> None:
self.proto.lineReceived(b"cat\n")
self.proto.lineReceived(b"test\n")
self.proto.handle_CTRL_C()
self.assertEqual(self.tr.value(), b"test\n^C\n" + PROMPT)
cowrie | cowrie-master/src/cowrie/test/test_echo.py | # Copyright (c) 2018 Michel Oosterhof
# See LICENSE for details.
from __future__ import annotations
import os
import unittest
from cowrie.shell.protocol import HoneyPotInteractiveProtocol
from cowrie.test.fake_server import FakeAvatar, FakeServer
from cowrie.test.fake_transport import FakeTransport
os.environ["COWRIE_HONEYPOT_DATA_PATH"] = "data"
os.environ["COWRIE_HONEYPOT_DOWNLOAD_PATH"] = "/tmp"
os.environ["COWRIE_SHELL_FILESYSTEM"] = "share/cowrie/fs.pickle"
PROMPT = b"root@unitTest:~# "
class ShellEchoCommandTests(unittest.TestCase):
"""Test for echo command from cowrie/commands/base.py."""
proto = HoneyPotInteractiveProtocol(FakeAvatar(FakeServer()))
tr = FakeTransport("", "31337")
@classmethod
def setUpClass(cls) -> None:
cls.proto.makeConnection(cls.tr)
@classmethod
def tearDownClass(cls) -> None:
cls.proto.connectionLost("tearDown From Unit Test")
def setUp(self) -> None:
self.tr.clear()
def test_echo_command_001(self) -> None:
self.proto.lineReceived(b'echo "test"\n')
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_002(self) -> None:
self.proto.lineReceived(b"echo test test\n")
self.assertEqual(self.tr.value(), b"test test\n" + PROMPT)
def test_echo_command_003(self) -> None:
self.proto.lineReceived(b'echo -n "test test"\n')
self.assertEqual(self.tr.value(), b"test test" + PROMPT)
def test_echo_command_005(self) -> None:
self.proto.lineReceived(b"echo test > test5; cat test5")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_006(self) -> None:
self.proto.lineReceived(b'echo "\\n"\n')
self.assertEqual(self.tr.value(), b"\\n\n" + PROMPT)
def test_echo_command_007(self) -> None:
self.proto.lineReceived(b"echo test >> test7; cat test7")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_008(self) -> None:
self.proto.lineReceived(b"echo test > test8; echo test >> test8; cat test8")
self.assertEqual(self.tr.value(), b"test\ntest\n" + PROMPT)
def test_echo_command_009(self) -> None:
self.proto.lineReceived(b"echo test | grep test")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_010(self) -> None:
self.proto.lineReceived(b"echo test | grep test2")
self.assertEqual(self.tr.value(), PROMPT)
def test_echo_command_011(self) -> None:
self.proto.lineReceived(b"echo test > test011; cat test011 | grep test")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_012(self) -> None:
self.proto.lineReceived(b"echo test > test012; grep test test012")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_013(self) -> None:
self.proto.lineReceived(b'echo "ls""ls"')
self.assertEqual(self.tr.value(), b"lsls\n" + PROMPT)
def test_echo_command_014(self) -> None:
self.proto.lineReceived(b"echo '\"ls\"'")
self.assertEqual(self.tr.value(), b'"ls"\n' + PROMPT)
def test_echo_command_015(self) -> None:
self.proto.lineReceived(b"echo \"'ls'\"")
self.assertEqual(self.tr.value(), b"'ls'\n" + PROMPT)
def test_echo_command_016(self) -> None:
self.proto.lineReceived(b'echo -e "\x6b\x61\x6d\x69"')
self.assertEqual(self.tr.value(), b"kami\n" + PROMPT)
def test_echo_command_017(self) -> None:
self.proto.lineReceived(b"echo echo test | bash")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_018(self) -> None:
self.proto.lineReceived(b"echo $(echo test)")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_019(self) -> None:
self.proto.lineReceived(b"echo $(echo $(echo test))")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_020(self) -> None:
self.proto.lineReceived(b"echo test_$(echo test)_test")
self.assertEqual(self.tr.value(), b"test_test_test\n" + PROMPT)
def test_echo_command_021(self) -> None:
self.proto.lineReceived(b"echo test_$(echo test)_test_$(echo test)_test")
self.assertEqual(self.tr.value(), b"test_test_test_test_test\n" + PROMPT)
def test_echo_command_022(self) -> None:
self.proto.lineReceived(b"echo test; (echo test)")
self.assertEqual(self.tr.value(), b"test\ntest\n" + PROMPT)
def test_echo_command_023(self) -> None:
self.proto.lineReceived(b"echo `echo test`")
self.assertEqual(self.tr.value(), b"test\n" + PROMPT)
def test_echo_command_024(self) -> None:
self.proto.lineReceived(b"echo test_`echo test`_test")
self.assertEqual(self.tr.value(), b"test_test_test\n" + PROMPT)
def test_echo_command_025(self) -> None:
self.proto.lineReceived(b"echo test_`echo test`_test_`echo test`_test")
self.assertEqual(self.tr.value(), b"test_test_test_test_test\n" + PROMPT)
def test_echo_command_026(self) -> None:
self.proto.lineReceived(b'echo "TEST1: `echo test1`, TEST2: `echo test2`"')
self.assertEqual(self.tr.value(), b"TEST1: test1, TEST2: test2\n" + PROMPT)
def test_echo_command_027(self) -> None:
self.proto.lineReceived(b"echo $LOGNAME")
self.assertEqual(self.tr.value(), b"root\n" + PROMPT)
def test_echo_command_028(self) -> None:
self.proto.lineReceived(b"echo ${LOGNAME}")
self.assertEqual(self.tr.value(), b"root\n" + PROMPT)
def test_echo_command_029(self) -> None:
self.proto.lineReceived(b"echo $(e)")
self.assertEqual(self.tr.value(), b"-bash: e: command not found\n\n" + PROMPT)
cowrie | cowrie-master/src/cowrie/test/test_base64.py | # Copyright (c) 2020 Peter Sufliarsky
# See LICENSE for details.
from __future__ import annotations
import os
import unittest
from cowrie.shell.protocol import HoneyPotInteractiveProtocol
from cowrie.test.fake_server import FakeAvatar, FakeServer
from cowrie.test.fake_transport import FakeTransport
os.environ["COWRIE_HONEYPOT_DATA_PATH"] = "data"
os.environ["COWRIE_HONEYPOT_DOWNLOAD_PATH"] = "/tmp"
os.environ["COWRIE_SHELL_FILESYSTEM"] = "share/cowrie/fs.pickle"
TRY_CHMOD_HELP_MSG = b"Try 'base64 --help' for more information.\n"
PROMPT = b"root@unitTest:~# "
class ShellBase64CommandTests(unittest.TestCase):
"""Tests for cowrie/commands/base64.py"""
proto = HoneyPotInteractiveProtocol(FakeAvatar(FakeServer()))
tr = FakeTransport("", "31337")
@classmethod
def setUpClass(cls) -> None:
cls.proto.makeConnection(cls.tr)
@classmethod
def tearDownClass(cls) -> None:
cls.proto.connectionLost("tearDown From Unit Test")
def setUp(self) -> None:
self.tr.clear()
def test_base64_command_001(self) -> None:
self.proto.lineReceived(b"echo cowrie | base64")
self.assertEqual(self.tr.value(), b"Y293cmllCg==\n" + PROMPT)
def test_base64_command_002(self) -> None:
self.proto.lineReceived(b"echo Y293cmllCg== | base64 -d")
self.assertEqual(self.tr.value(), b"cowrie\n" + PROMPT)
cowrie | cowrie-master/src/cowrie/test/proxy_compare.py | from __future__ import annotations
from backend_pool.ssh_exec import execute_ssh
from backend_pool.telnet_exec import execute_telnet
from twisted.internet import defer
class ProxyTestCommand:
"""
This class executes commands on Proxy instances and their backends (or either one of them).
If executing on both, it compares their outputs, and a deferred succeeds on that case.
"""
def __init__(
self,
type,
hostname,
port_backend,
port_proxy,
username_backend,
password_backend,
username_proxy,
password_proxy,
):
self.deferred = defer.Deferred()
self.backend_data = None
self.proxy_data = None
self.hostname = hostname
self.port_backend = port_backend
self.port_proxy = port_proxy
self.username_backend = username_backend
self.password_backend = password_backend
self.username_proxy = username_proxy
self.password_proxy = password_proxy
# whether to execute the command via SSH or Telnet
self.execute = execute_ssh if type == "ssh" else execute_telnet
def execute_both(self, command):
def callback_backend(data):
# if we haven't received data from the proxy just store the output
if not self.proxy_data:
self.backend_data = data
else:
# compare data from proxy and backend
if data == self.proxy_data:
self.deferred.callback(True)
else:
self.deferred.errback(ValueError())
def callback_proxy(data):
# if we haven't received data from the backend just store the output
if not self.backend_data:
self.proxy_data = data
else:
# compare data from proxy and backend
if data == self.backend_data:
self.deferred.callback(True)
else:
self.deferred.errback(
ValueError("Values from proxy and backend do not match!")
)
# execute exec command on both backend and proxy
self.execute(
self.hostname,
self.port_backend,
self.username_backend,
self.password_backend,
command,
callback_backend,
)
self.execute(
self.hostname,
self.port_proxy,
self.username_proxy,
self.password_proxy,
command,
callback_proxy,
)
def execute_one(self, is_proxy, command, deferred):
def callback(data):
deferred.callback(data)
        if is_proxy:
            # execute via proxy
            port = self.port_proxy
            username = self.username_proxy
            password = self.password_proxy
        else:
            # execute via backend
            port = self.port_backend
            username = self.username_backend
            password = self.password_backend
        # execute exec command against the selected endpoint
        self.execute(self.hostname, port, username, password, command, callback)
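# Editor-added usage sketch (not part of the original module); the host, ports,
# credentials and callbacks below are placeholders. The deferred fires with True
# when proxy and backend return identical output for the same command:
#
#   cmd = ProxyTestCommand(
#       "ssh", "127.0.0.1",
#       port_backend=2222, port_proxy=2223,
#       username_backend="root", password_backend="example",
#       username_proxy="root", password_proxy="example",
#   )
#   cmd.execute_both("uname -a")
#   cmd.deferred.addCallbacks(on_match, on_mismatch)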
cowrie | cowrie-master/src/cowrie/test/test_ftpget.py | # Copyright (c) 2018 Michel Oosterhof
# See LICENSE for details.
from __future__ import annotations
import os
import unittest
from cowrie.shell.protocol import HoneyPotInteractiveProtocol
from cowrie.test.fake_server import FakeAvatar, FakeServer
from cowrie.test.fake_transport import FakeTransport
os.environ["COWRIE_HONEYPOT_DATA_PATH"] = "data"
os.environ["COWRIE_HONEYPOT_DOWNLOAD_PATH"] = "/tmp"
os.environ["COWRIE_SHELL_FILESYSTEM"] = "share/cowrie/fs.pickle"
PROMPT = b"root@unitTest:~# "
class ShellFtpGetCommandTests(unittest.TestCase):
"""Tests for cowrie/commands/ftpget.py."""
proto = HoneyPotInteractiveProtocol(FakeAvatar(FakeServer()))
tr = FakeTransport("", "31337")
@classmethod
def setUpClass(cls) -> None:
cls.proto.makeConnection(cls.tr)
@classmethod
def tearDownClass(cls) -> None:
cls.proto.connectionLost("tearDown From Unit Test")
def setUp(self) -> None:
self.tr.clear()
def test_help_command(self) -> None:
usage = (
b"BusyBox v1.20.2 (2016-06-22 15:12:53 EDT) multi-call binary.\n"
b"\n"
b"Usage: ftpget [OPTIONS] HOST [LOCAL_FILE] REMOTE_FILE\n"
b"\n"
b"Download a file via FTP\n"
b"\n"
b" -c Continue previous transfer\n"
b" -v Verbose\n"
b" -u USER Username\n"
b" -p PASS Password\n"
b" -P NUM Port\n\n"
)
self.proto.lineReceived(b"ftpget\n")
self.assertEqual(self.tr.value(), usage + PROMPT)
| 1,588 | 28.981132 | 77 | py |