pip3 install pipenv
pipenv shell
#!/usr/bin/env python
import os, os.path, stat, sys, base64
# TOTP lib inlined
import time, hmac, hashlib, struct

def pack_counter(t):
    # Encode the time-step counter as an 8-byte big-endian integer (RFC 4226).
    return struct.pack(">Q", t)
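For context, the rest of the RFC 6238 computation built on pack_counter would look
roughly like the sketch below; the totp name, the 30-second step, and the 6-digit
output are common defaults assumed here, not taken from the original script.

def totp(secret_b32, digits=6, step=30):
    # Decode the base32-encoded shared secret.
    key = base64.b32decode(secret_b32, casefold=True)
    # HMAC-SHA1 over the current time step (RFC 6238).
    digest = hmac.new(key, pack_counter(int(time.time()) // step), hashlib.sha1).digest()
    # Dynamic truncation (RFC 4226): 4 bytes at the offset given by the low nibble.
    offset = digest[-1] & 0x0F
    code = struct.unpack(">I", digest[offset:offset + 4])[0] & 0x7FFFFFFF
    return str(code % 10 ** digits).zfill(digits)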
version: '2'
services:
  database:
    image: 'zulip/zulip-postgresql:10'
    environment:
      POSTGRES_DB: 'zulip'
      POSTGRES_USER: 'zulip'
      # Note that you need to do a manual `ALTER ROLE` query if you
      # change this on a system after booting the postgres container
      # the first time on a host. Instructions are available in README.md.
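For reference, that manual step would look roughly like the following, run against the
already-running container (the role and database names come from the compose file
above; the new password is a placeholder):

docker-compose exec database \
  psql -U zulip -d zulip -c "ALTER ROLE zulip WITH PASSWORD 'new-password-here';"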
# (c) Copyright 2018 Zymergen, Inc.
# All Rights Reserved
"""
The following is example code used for a technology blog post:
https://medium.com/@ZymergenTechBlog/building-a-parallel-task-api-with-celery-dbae5ced4e28

The ForkJoin class can be used to generate a ZWork task that contains a single
distributed processing step. Your job should have three parts: an initial setup step
responsible for splitting inputs into workable chunks, a process step that processes
each chunk in a forked execution process, and a join step that puts it all back
together.
"""
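Since the ForkJoin class itself is not shown here, the sketch below expresses the same
three-part setup/process/join shape with stock Celery primitives (a chord forks the
process tasks and joins on a callback); the task bodies and broker URL are illustrative
assumptions, not Zymergen's actual ZWork API.

from celery import Celery, chord

app = Celery('forkjoin_demo', broker='redis://localhost:6379/0')  # assumed broker

@app.task
def process(chunk):
    # Fork: each chunk is handled by its own worker.
    return sum(chunk)

@app.task
def join(results):
    # Join: put the per-chunk results back together.
    return sum(results)

def run(inputs, chunk_size=10):
    # Setup: split the inputs into workable chunks, then fork and join.
    chunks = [inputs[i:i + chunk_size] for i in range(0, len(inputs), chunk_size)]
    return chord(process.s(c) for c in chunks)(join.s())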
package encryption

// crypto/cipher, runtime and unsafe are used elsewhere in this package.
import (
	"crypto/cipher"
	"runtime"
	"unsafe"
)

// dup returns a copy of p so callers can mutate it without touching the original.
func dup(p []byte) []byte {
	q := make([]byte, len(p))
	copy(q, p)
	return q
}
#!/bin/bash
## Poll bonbast.com once a minute (via proxychains) and print the USD
## rate, taken from column 5 of the lynx-rendered page.
while true
do
    proxychains curl -s https://bonbast.com | lynx --dump --stdin | grep USD | awk '{print $5}'
    sleep 60
done
package main

import (
	"context"
	"flag"
	"fmt"
	"log"
	"net/http"
	"os"
	"os/signal"
)
import logging.config
import os
from django.utils.log import DEFAULT_LOGGING

# Disable Django's logging setup; this dictConfig becomes the single source of truth
LOGGING_CONFIG = None
LOGLEVEL = os.environ.get('LOGLEVEL', 'info').upper()

# Route everything to a console handler at LOGLEVEL (DEFAULT_LOGGING can be
# merged in to keep Django's runserver request logging)
logging.config.dictConfig({
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {'console': {'class': 'logging.StreamHandler'}},
    'root': {'handlers': ['console'], 'level': LOGLEVEL},
})
My Elasticsearch cheatsheet with example usage via the REST API (still a work in progress).
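As a first example of the REST calls the cheatsheet collects, checking cluster health
(the host and port below are Elasticsearch's stock defaults, assumed here):

curl -X GET 'http://localhost:9200/_cluster/health?pretty'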