Return a string giving the literal bit representation of an unsigned 16-bit integer.
var toBinaryString = require( '@stdlib/math/base/utils/uint16-to-binary-string' );
Returns a string giving the literal bit representation of an unsigned 16-bit integer.
// Bit representations of a few small unsigned 16-bit integers:
var u16 = new Uint16Array( [ 1, 4, 9 ] );

var str = toBinaryString( u16[ 0 ] );
// returns '0000000000000001'

str = toBinaryString( u16[ 1 ] );
// returns '0000000000000100'

str = toBinaryString( u16[ 2 ] );
// returns '0000000000001001'
Notes

Except for typed arrays, JavaScript does not provide native user support for unsigned 16-bit integers. According to the ECMAScript standard, `number` values correspond to double-precision floating-point numbers. While this function is intended for unsigned 16-bit integers, the function will accept floating-point values and represent the values as if they are unsigned 16-bit integers. Accordingly, care should be taken to ensure that only nonnegative integer values less than 65536 (2^16) are provided.

var str = toBinaryString( 1 );
// returns '0000000000000001'

str = toBinaryString( 4 );
// returns '0000000000000100'

str = toBinaryString( 9 );
// returns '0000000000001001'

str = toBinaryString( 65535 );
// returns '1111111111111111'
var randu = require( '@stdlib/math/base/random/randu' );
var round = require( '@stdlib/math/base/special/round' );
var MAX_UINT16 = require( '@stdlib/math/constants/uint16-max' );
var toBinaryString = require( '@stdlib/math/base/utils/uint16-to-binary-string' );

var x;
var y;
var b;
var i;

// Generate random unsigned 16-bit integers...
x = new Uint16Array( 100 );
for ( i = 0; i < x.length; i++ ) {
	x[ i ] = round( randu()*MAX_UINT16 );
}

// Convert each integer to its literal bit representation and round-trip
// via `parseInt` to demonstrate that the string decodes back to the value...
for ( i = 0; i < x.length; i++ ) {
	b = toBinaryString( x[ i ] ); // fix: was `bits`, an undefined identifier
	y = parseInt( b, 2 );
	console.log( 'x: %d, b: %s, y: %d', x[ i ], b, y );
}