I hereby claim:
- I am usmanm on github.
- I am usmanm (https://keybase.io/usmanm) on keybase.
- I have a public key ASDeJktH_qIjoHbQe-6vYWb07sB_Hpwns9Wr04FfovUx9Ao
To claim this, I am signing this object:
| import hashlib | |
| import hmac | |
| import json | |
| from urllib.parse import urlparse, urlunparse | |
| from django.http import HttpRequest | |
| def verify_signature(signing_key: str, request: HttpRequest, max_delay: int = 30) -> bool: | |
| """ |
/*
Build command for the krisp_processor pybind11 extension (macOS arm64):

g++ -O3 -Wall -shared -std=c++11 \
    -undefined dynamic_lookup \
    $(python3 -m pybind11 --includes) \
    -Isdk/macos-arm/include \
    krisp_processor.cpp \
    sdk/macos-arm/lib/libkrisp-audio-sdk.a \
    sdk/macos-arm/external/libresample.a \
    -o krisp_processor$(python3-config --extension-suffix)
*/
Humanoids for the digital world.
I hereby claim:
To claim this, I am signing this object:
#!/bin/bash
# Build ZeroMQ 4.1.5 from source as a static library, configured to install
# under /usr. Each step in the chain runs only if the previous one succeeded.
#
# Fix: stripped the markdown-table pipe residue ("| ... | |") that made every
# line a shell syntax error.
sudo apt-get install -y libtool autoconf automake
wget https://github.com/zeromq/zeromq4-1/releases/download/v4.1.5/zeromq-4.1.5.tar.gz && \
tar -xvf zeromq-4.1.5.tar.gz && \
cd zeromq-4.1.5/ && \
./autogen.sh && \
./configure --enable-static --prefix=/usr && \
make
# NOTE(review): the original fragment is truncated after "make && \" —
# presumably followed by "sudo make install"; confirm against the full script.
#!/bin/bash
# Build and install nanomsg 1.0.0 from source as a static, position-independent
# library under /usr (CMake-based build).
#
# Fixes:
#  - The original placed "&&" BETWEEN the two -D flags
#    ("-DNN_STATIC_LIB=1 && -DCMAKE_POSITION_INDEPENDENT_CODE=1 \"), which cut
#    the configure command in half and left a dangling flag as a bogus command.
#    Both flags belong to the same cmake invocation.
#  - Stripped the markdown-table pipe residue that made each line unparseable.
wget https://github.com/nanomsg/nanomsg/archive/1.0.0.tar.gz && \
tar -xvf 1.0.0.tar.gz && \
cd nanomsg-1.0.0 && \
mkdir build && \
cd build && \
cmake .. -DCMAKE_INSTALL_PREFIX=/usr -DNN_STATIC_LIB=1 -DCMAKE_POSITION_INDEPENDENT_CODE=1 && \
cmake --build . && \
sudo cmake --build . --target install
# pipeline_kafka Broker API
# Start a fresh single-node ZooKeeper and Kafka broker, wiping any state left
# over from previous runs (each server start blocks; run in separate shells).
rm -rf /tmp/zookeeper; ./bin/zookeeper-server-start.sh config/zookeeper.properties
rm -rf /tmp/kafka-logs; ./bin/kafka-server-start.sh config/server.properties

# pipeline_kafka Consumer API
# Append the pipeline_kafka settings to the PipelineDB config, then create a
# 5-partition, replication-factor-1 topic to consume from.
cat ~/snippets/pipeline_kafka.conf >> ~/pdb/data/pipelinedb.conf
./bin/kafka-topics.sh --zookeeper localhost:2181 --topic consumer_topic --create --partitions 5 --replication-factor 1
# Produce 1000 one-line JSON events ({ "x": i }) into my_topic via kafkacat.
for i in $(seq 1 1000); do echo { \"x\": $i }; done | kafkacat -P -b localhost:9092 -t my_topic

# pipeline_kafka Producer API
-- Continuous View
-- Declare a typed stream, then a continuous view that maintains a running
-- count of each distinct x value seen on the stream.
CREATE STREAM s0 (x int);

CREATE CONTINUOUS VIEW cv AS
SELECT x, count(*) FROM s0 GROUP BY x;

-- Feed the stream: 1000 rows with x cycling through 0..9, so cv ends up with
-- 10 groups of 100 each.
INSERT INTO s0 (x)
SELECT x % 10 FROM generate_series(1, 1000) AS x;

-- Continuous Transform
CREATE STREAM s1 (x int);
| CREATE CONTINUOUS TRANSFORM ct AS |
| package main | |
| import ( | |
| "database/sql" | |
| "fmt" | |
| "math/rand" | |
| _ "github.com/lib/pq" | |
| ) | |
| func main() { |
-- Enforce a strongly typed schema on this stream
-- NOTE(review): column meanings inferred from names only — presumably
-- event_type is a one-character code and cookie a 32-char session/user
-- identifier; confirm against the producer.
CREATE STREAM ab_event_stream
(
  name text,
  ab_group text,
  event_type varchar(1),
  cookie varchar(32)
);
| CREATE CONTINUOUS VIEW ab_test_monitor AS |