In Part 2, you may have noticed that the bits representing the value of an unsigned int did not appear to match the bits representing the object. This is because of the endianness of my implementation. When it comes to displaying the bits of objects that take more space than a char, this issue will come into play.

#include <stdio.h>
#include <limits.h>

/*
 * Render the bits of `object` into `dst` as a '0'/'1' string, visiting
 * the bytes from HIGHEST address to LOWEST (so on a little-endian machine
 * the output reads as the value's conventional big-endian bit string).
 * Within each byte, bits are emitted most-significant first.
 *
 * dst must hold at least size * CHAR_BIT + 1 chars; it is always
 * NUL-terminated ("" when size == 0).  Returns dst.
 *
 * Fix: the original did `byte += size - 1` before the loop, which forms an
 * out-of-bounds pointer when size == 0, and its final `--byte` stepped to
 * one-before-the-object -- both undefined behavior (C11 6.5.6).  Indexing
 * from `size` down to 1 never forms an invalid pointer.
 */
char *bits_le(char *dst, const void *object, size_t size)
{
   char *start = dst;
   const unsigned char *bytes = object;
   size_t i;
   for ( i = size; i > 0; --i )
   {
      unsigned char b = bytes[i - 1];   /* highest-addressed byte first */
      unsigned char bit;
      for ( bit = 1u << (CHAR_BIT - 1); bit; bit >>= 1 )
      {
         *dst++ = (b & bit) ? '1' : '0';
      }
   }
   *dst = '\0';
   return start;
}

/*
 * Render the bits of `object` into `dst` as a '0'/'1' string, visiting
 * the bytes in MEMORY order (lowest address first), most-significant
 * bit first within each byte.
 *
 * dst must hold at least size * CHAR_BIT + 1 chars; it is always
 * NUL-terminated ("" when size == 0).  Returns dst.
 */
char *bits_be(char *dst, const void *object, size_t size)
{
   char *out = dst;
   const unsigned char *p = object;
   size_t i;
   for ( i = 0; i < size; ++i )
   {
      int shift;
      for ( shift = CHAR_BIT - 1; shift >= 0; --shift )
      {
         *out++ = ((p[i] >> shift) & 1) ? '1' : '0';
      }
   }
   *out = '\0';
   return dst;
}

/*
 * Demo driver: dump the bit patterns of an unsigned long and a double,
 * once byte-reversed (bits_le) and once in memory order (bits_be).
 * NOTE(review): buffer is sized from the double; this assumes
 * sizeof(unsigned long) <= sizeof(double) on this platform -- confirm.
 */
int main(void)
{
   unsigned long ul = 0x12345678;
   double dbl = 123.456;
   char text[sizeof dbl * CHAR_BIT + 1];

   printf("value = 0x%lX: %s\n", ul, bits_le(text, &ul, sizeof ul));
   printf("value = 0x%lX: %s\n", ul, bits_be(text, &ul, sizeof ul));
   printf("d = %g: %s\n", dbl, bits_le(text, &dbl, sizeof dbl));
   printf("d = %g: %s\n", dbl, bits_be(text, &dbl, sizeof dbl));
   return 0;
}

/* my output
value = 0x12345678: 00010010001101000101011001111000
value = 0x12345678: 01111000010101100011010000010010
d = 123.456: 0100000001011110110111010010111100011010100111111011111001110111
d = 123.456: 0111011110111110100111110001101000101111110111010101111001000000
*/
The article's author has earned a lot of community kudos, and such articles offer a bounty for quality replies.