Challenge: encode / decode 400 (or more) ASCII characters in a single tweet!
// Encoder
window.e = function(source, i, tmp, result){
  tmp = "";
  result = "";
  // concatenate each ASCII char as a 7-bit binary string (the leading "0" pads codes below 64)
  for(i in source){
    tmp += (0 + source.charCodeAt(i).toString(2)).slice(-7);
  }
  // consume the bit string 10 bits at a time, storing each chunk in an alternating
  // high (0xD800) / low (0xDC00) surrogate: 20 bits per surrogate pair
  for(i = 0; tmp; tmp = tmp.slice(10)){
    result += String.fromCharCode((i++ % 2 ? 0xDC00 : 0xD800) + parseInt(tmp.substring(0, 10), 2));
  }
  return result;
}
// Decoder
window.d = function(source, i, tmp, result){
  tmp = "";
  result = "";
  for(i = 0; i < 400; ){
    // drop the 7 bits consumed last turn, append the low 10 bits of the next surrogate
    // (past the 280th code unit this appends "NaN", which never reaches the front of the buffer)
    tmp = tmp.slice(7) + source.charCodeAt(i++).toString(2).slice(-10);
    // the first 7 bits of the buffer are the next ASCII code
    result += String.fromCharCode(parseInt(tmp.substring(0, 7), 2));
  }
  return result;
}
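For example, a quick round-trip check in the console (the random 400-character test string below is just an assumed example; any printable ASCII works):
// assumed example: build 400 random printable ASCII chars, encode, decode, compare
var plain = "";
for(var k = 0; k < 400; k++) plain += String.fromCharCode(32 + Math.random() * 95 | 0);
console.log(e(plain).length);       // 280 UTF-16 code units = 140 surrogate pairs
console.log(d(e(plain)) === plain); // true: the decoder restores the original 400 chars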
Hi,
A tweet can contain 140 UTF-16 characters.
A UTF-16 character can be composed of two 16-bit surrogates.
A UTF-16 surrogate can be used to store 10 bits.
An ASCII character is 7 bits long.
So, a tweet can encode 140 x 2 x 10 = 2800 bits, and 2800 / 7 = 400 plain ASCII characters.
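To illustrate with arbitrary example values (not part of the challenge), each surrogate leaves room for a 10-bit payload, and a high/low pair forms a single character:
// arbitrary payload values, just to show the 10 free bits in each surrogate
var hi = 0xD800 + 0x155;             // high surrogate carrying 10 bits
var lo = 0xDC00 + 0x2AA;             // low surrogate carrying 10 bits
var c = String.fromCharCode(hi, lo); // 2 code units, but a single (astral) character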
The challenge is to make an encoder (converting 400 - or more - ASCII chars into 140 UTF-16 chars) and a decoder (doing the opposite), each fitting in a tweet.
NB: the encoder and decoder can be packed with this: https://gist.github.com/xem/7086007
NB2: the non-printable characters 0x00 to 0x1F and 0x7F can be ignored (only printable ASCII needs to be supported).
Have fun!
Encodes 400 ASCII chars.
Encoder: 190 chars minified, 140 chars packed
e=function(e,c,b,d){d=b="";for(c in e)b+=(0+e.charCodeAt(c).toString(2)).slice(-7);for(c=0;b;b=b.slice(10))d+=String.fromCharCode((c++%2?56320:55296)+parseInt(b.substring(0,10),2));return d}
// or
eval(unescape(escape("鐽页롣큩뱮ꁥ끣끢끤ꑻ逽蠽蠢뱲ꁣ聩렠鐩蠫쀫鐮豨葲౯遥Ѵꁣꐮ큯䱴졩롧ꀲꐩ롳끩豥ꀭ뱲ꁣ롳끩豥ꀱ쀩ꑤ갽䱴졩롧롦졯둃ꁡ졃뱤鐨ꁣ갫鐲ﰵ퐲ꐫ쁡졳鑉롴ꁢ롳푢챴졩롧ꀰ뀱쀩뀲ꐩ鑴푲렠遽").replace(/uD./g,'')))
Decoder: 159 chars minified, 124 chars packed
e=function(e,d,b,c){c=b="";for(d=0;400>d;)b=b.slice(7)+e.charCodeAt(d++).toString(2).slice(-10),c+=String.fromCharCode(parseInt(b.substring(0,7),2));return c}
// or
eval(unescape(escape("逽页롣큩뱮ꁥ끤끢끣ꑻ谽蠽蠢뱲ꁤ쀰蠽蠮챬ꑣ鐨롣ꁡ졃뱤鑁퀨逫갩롴뱓큲ꑮ鰨젩롳끩豥ꀭ쐰ꐬ谫큲ꑮ鰮顲뱭౨葲౯遥ꁰ葲챥⑮퀨蠮챵衳큲ꑮ鰨쀬ꐩ鑴푲렠豽").replace(/uD./g,'')))
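For reference, here is a rough sketch of the packing that the wrapper above undoes (an assumption inferred from the replace(/uD./g,'') trick, not the exact code from the gist; the pack function name is hypothetical): each pair of source bytes becomes one surrogate pair, escape() renders it as %uD8xx%uDCyy, stripping "uD." leaves %xx%yy, and unescape() restores the two original bytes.
// hypothetical packer (assumed scheme): byte pair (a, b) -> one surrogate pair (0xD800+a, 0xDC00+b)
pack = function(src, i, out){
  out = "";
  src += " "; // pad with a harmless trailing space in case of an odd length
  for(i = 0; i + 1 < src.length; i += 2){
    out += String.fromCharCode(0xD800 + src.charCodeAt(i), 0xDC00 + src.charCodeAt(i + 1));
  }
  return out;
}
// usage (assumed): eval(unescape(escape("<packed>").replace(/uD./g,''))) with <packed> = pack(minifiedSource)
With this 2-to-1 packing, the minified source shrinks to half as many packed characters, plus the fixed eval/unescape wrapper.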
Demo and source code: