Question : Conversion entre UInt et Int en Swift


J'ai les méthodes suivantes

// Fixed declaration: the original read `var photos = [MWPhoto] = [MWPhoto]()`,
// which is invalid Swift (two `=` signs) — a type annotation uses `:`.
var photos: [MWPhoto] = [MWPhoto]()

// NOTE(review): this is the code from the question — it intentionally does NOT compile.
// `self.photos.count` is an `Int`, but the declared return type is `UInt`;
// Swift never converts between integer types implicitly, hence the
// "Int is not convertible to UInt" error described below.
func numberOfPhotosInPhotoBrowser(photoBrowser: MWPhotoBrowser!) -> UInt {

    return self.photos.count
}

// NOTE(review): code from the question — it intentionally does NOT compile.
// Array subscripts take an `Int`, but `index` is a `UInt`, hence the
// "UInt is not convertible to Int" error described below.
func photoBrowser(photoBrowser: MWPhotoBrowser!, photoAtIndex index: UInt) -> MWPhotoProtocol! {

    return self.photos[index]
}

Cependant, pour la première méthode, je reçois l'erreur « Int is not convertible to UInt » (puisque self.photos.count est un Int),

et pour la seconde « UInt is not convertible to Int » — puisque l'indice d'un tableau (self.photos[...]) ne peut être qu'un Int.

Comment puis-je convertir correctement un UInt en Int, et inversement ?


35
2017-09-28 15:25


origine


Réponses:


Dans le premier, le type de retour est UInt, mais vous retournez Int car count renvoie Int.

Fondamentalement, UInt possède des initialiseurs qui acceptent différents types de valeur tels que Int, CGFloat, Double ou même une chaîne (String), et qui renvoient une nouvelle valeur de type UInt.

  • UInt (8) // le résultat est 8 type de valeur UInt
  • UInt (20.12) // le résultat est 20 type de valeur UInt
  • UInt (Double (10)) // le résultat est 10 type de valeur UInt
  • UInt("10") // le résultat est 10, de type UInt ; notez que cet initialiseur est « failable » : il renvoie un optionnel (UInt?), c'est-à-dire la valeur ou nil si la chaîne n'est pas un nombre valide

-

/// MWPhotoBrowser data-source callback: reports how many photos are available.
/// `Array.count` is an `Int`, so it is converted explicitly with `UInt(_:)`.
/// `UInt(_:)` traps only on a negative value, which a count can never be.
func numberOfPhotosInPhotoBrowser(photoBrowser: MWPhotoBrowser!) -> UInt {

    return UInt(self.photos.count)
}

Pour la seconde méthode, l'indice de tableau attend une valeur Int alors que vous passez un UInt ; créez donc une nouvelle valeur de type Int à partir du UInt :

/// MWPhotoBrowser data-source callback: returns the photo at `index`.
/// Array subscripts require an `Int`, so the `UInt` index is converted
/// explicitly with `Int(_:)`; this traps only if the value exceeds `Int.max`.
func photoBrowser(photoBrowser: MWPhotoBrowser!, photoAtIndex index: UInt) -> MWPhotoProtocol! {

    return self.photos[Int(index)]
}

57
2017-09-28 15:42



// Round-trip demonstration: Int -> UInt -> Int using the plain
// value-preserving initializers (these trap if the value does not fit).

// Start from a signed value.
var originalInt: Int = 8
originalInt

// Int -> UInt via UInt(_:) — traps if the source is negative.
var intAsUInt: UInt = UInt(originalInt)
intAsUInt

// Start from an unsigned value.
var originalUInt: UInt = 10
originalUInt

// UInt -> Int via Int(_:) — traps if the source exceeds Int.max.
var uintAsInt: Int = Int(originalUInt)
uintAsInt

8
2017-09-28 15:40



Si vous voulez obtenir la représentation non signée (UInt) d'une valeur négative, utilisez UInt(bitPattern:) :

// UInt(bitPattern:) reinterprets the two's-complement bits of a negative Int
// instead of trapping the way the plain UInt(_:) initializer would.
let signedValue = -1
let unsignedBits = UInt(bitPattern: signedValue) // == 0xffffffffffffffff on a 64-bit platform

4
2018-01-15 16:22



Ajoutez ceci n'importe où en dehors d'une classe:

extension UInt {
    /// SwiftExtensionKit
    /// The same numeric value as a signed `Int` (traps if it exceeds `Int.max`).
    var toInt: Int { Int(self) }
}

Alors appelez simplement:

self.photos[index.toInt]  // c'est l'indice (UInt) qu'il faut convertir, avant le subscript

1
2017-10-29 19:20



J'étais tellement frustré par les paramètres cryptiques des initialiseurs Swift bitPattern: et truncatingBitPattern:, et par mon incapacité à me rappeler lequel utiliser, que j'ai créé la classe suivante contenant un grand nombre de méthodes de conversion.

Je ne vous recommande pas nécessairement d'inclure cela dans votre programme. Je suis sûr que beaucoup de gens diront que Swift essaie de nous protéger de nous-mêmes et que saboter cet effort est stupide. Alors, vous devriez peut-être simplement conserver ce fichier quelque part comme une sorte de feuille de triche afin de déterminer rapidement comment effectuer une conversion et copier les paramètres dans votre programme si nécessaire.

Incidemment, JDI signifie « Just Do It » (« fais-le, tout simplement »).

/// Class containing a large number of static methods to convert an Int to a UInt or vice-versa, and
/// also to perform conversions between different bit sizes, for example UInt32 to UInt8.
///
/// Many of these "conversions" are trivial, and are only included for the sake of completeness.
///
/// A few of the conversions involving Int and UInt can give different results when run on 32-bit
/// and 64-bit systems. All of the conversion where the bit size of both the source and the target
/// are specified will always give the same result independent of platform.
///
/// NOTE(review): the original used `init(truncatingBitPattern:)`, which was removed in Swift 4
/// (SE-0104) in favour of `init(truncatingIfNeeded:)`. For every narrowing call below the two are
/// bit-for-bit identical (keep the low-order bits); for widening from a signed source,
/// `truncatingIfNeeded` sign-extends, matching the original `Int64`-widening workaround.
public class JDI {

   // MARK: - To signed Int

   // To Int8
   public static func ToInt8(_ x : Int8) -> Int8 {
      return x
   }
   public static func ToInt8(_ x : Int32) -> Int8 {
      return Int8(truncatingIfNeeded: x)
   }
   public static func ToInt8(_ x : Int64) -> Int8 {
      return Int8(truncatingIfNeeded: x)
   }
   public static func ToInt8(_ x : Int) -> Int8 {
      return Int8(truncatingIfNeeded: x)
   }
   public static func ToInt8(_ x : UInt8) -> Int8 {
      return Int8(bitPattern: x)
   }
   public static func ToInt8(_ x : UInt32) -> Int8 {
      return Int8(truncatingIfNeeded: x)
   }
   public static func ToInt8(_ x : UInt64) -> Int8 {
      return Int8(truncatingIfNeeded: x)
   }
   public static func ToInt8(_ x : UInt) -> Int8 {
      return Int8(truncatingIfNeeded: x)
   }

   // To Int32
   public static func ToInt32(_ x : Int8) -> Int32 {
      return Int32(x)
   }
   public static func ToInt32(_ x : Int32) -> Int32 {
      return x
   }
   public static func ToInt32(_ x : Int64) -> Int32 {
      return Int32(truncatingIfNeeded: x)
   }
   public static func ToInt32(_ x : Int) -> Int32 {
      return Int32(truncatingIfNeeded: x)
   }
   public static func ToInt32(_ x : UInt8) -> Int32 {
      return Int32(x)
   }
   public static func ToInt32(_ x : UInt32) -> Int32 {
      return Int32(bitPattern: x)
   }
   public static func ToInt32(_ x : UInt64) -> Int32 {
      return Int32(truncatingIfNeeded: x)
   }
   public static func ToInt32(_ x : UInt) -> Int32 {
      return Int32(truncatingIfNeeded: x)
   }

   // To Int64
   public static func ToInt64(_ x : Int8) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : Int32) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : Int64) -> Int64 {
      return x
   }
   public static func ToInt64(_ x : Int) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : UInt8) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : UInt32) -> Int64 {
      return Int64(x)
   }
   public static func ToInt64(_ x : UInt64) -> Int64 {
      return Int64(bitPattern: x)
   }
   public static func ToInt64(_ x : UInt) -> Int64 {
      return Int64(bitPattern: UInt64(x))  // Does not extend high bit of 32-bit input
   }

   // To Int
   public static func ToInt(_ x : Int8) -> Int {
      return Int(x)
   }
   public static func ToInt(_ x : Int32) -> Int {
      return Int(x)
   }
   public static func ToInt(_ x : Int64) -> Int {
      return Int(truncatingIfNeeded: x)
   }
   public static func ToInt(_ x : Int) -> Int {
      return x
   }
   public static func ToInt(_ x : UInt8) -> Int {
      return Int(x)
   }
   public static func ToInt(_ x : UInt32) -> Int {
      if MemoryLayout<Int>.size == MemoryLayout<Int32>.size {
         return Int(Int32(bitPattern: x))  // For 32-bit systems, non-authorized interpretation
      }
      return Int(x)
   }
   public static func ToInt(_ x : UInt64) -> Int {
      return Int(truncatingIfNeeded: x)
   }
   public static func ToInt(_ x : UInt) -> Int {
      return Int(bitPattern: x)
   }

   // MARK: - To unsigned Int

   // To UInt8
   public static func ToUInt8(_ x : Int8) -> UInt8 {
      return UInt8(bitPattern: x)
   }
   public static func ToUInt8(_ x : Int32) -> UInt8 {
      return UInt8(truncatingIfNeeded: x)
   }
   public static func ToUInt8(_ x : Int64) -> UInt8 {
      return UInt8(truncatingIfNeeded: x)
   }
   public static func ToUInt8(_ x : Int) -> UInt8 {
      return UInt8(truncatingIfNeeded: x)
   }
   public static func ToUInt8(_ x : UInt8) -> UInt8 {
      return x
   }
   public static func ToUInt8(_ x : UInt32) -> UInt8 {
      return UInt8(truncatingIfNeeded: x)
   }
   public static func ToUInt8(_ x : UInt64) -> UInt8 {
      return UInt8(truncatingIfNeeded: x)
   }
   public static func ToUInt8(_ x : UInt) -> UInt8 {
      return UInt8(truncatingIfNeeded: x)
   }

   // To UInt32
   public static func ToUInt32(_ x : Int8) -> UInt32 {
      return UInt32(bitPattern: Int32(x))  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt32(_ x : Int32) -> UInt32 {
      return UInt32(bitPattern: x)
   }
   public static func ToUInt32(_ x : Int64) -> UInt32 {
      return UInt32(truncatingIfNeeded: x)
   }
   public static func ToUInt32(_ x : Int) -> UInt32 {
      return UInt32(truncatingIfNeeded: x)
   }
   public static func ToUInt32(_ x : UInt8) -> UInt32 {
      return UInt32(x)
   }
   public static func ToUInt32(_ x : UInt32) -> UInt32 {
      return x
   }
   public static func ToUInt32(_ x : UInt64) -> UInt32 {
      return UInt32(truncatingIfNeeded: x)
   }
   public static func ToUInt32(_ x : UInt) -> UInt32 {
      return UInt32(truncatingIfNeeded: x)
   }

   // To UInt64
   public static func ToUInt64(_ x : Int8) -> UInt64 {
      return UInt64(bitPattern: Int64(x))  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt64(_ x : Int32) -> UInt64 {
      return UInt64(bitPattern: Int64(x))  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt64(_ x : Int64) -> UInt64 {
      return UInt64(bitPattern: x)
   }
   public static func ToUInt64(_ x : Int) -> UInt64 {
      return UInt64(bitPattern: Int64(x))  // Extend sign bit if necessary, assume minus input significant
   }
   public static func ToUInt64(_ x : UInt8) -> UInt64 {
      return UInt64(x)
   }
   public static func ToUInt64(_ x : UInt32) -> UInt64 {
      return UInt64(x)
   }
   public static func ToUInt64(_ x : UInt64) -> UInt64 {
      return x
   }
   public static func ToUInt64(_ x : UInt) -> UInt64 {
      return UInt64(x)  // Does not extend high bit of 32-bit input
   }

   // To UInt
   public static func ToUInt(_ x : Int8) -> UInt {
      return UInt(bitPattern: Int(x))  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt(_ x : Int32) -> UInt {
      return UInt(truncatingIfNeeded: x)  // Extend sign bit, assume minus input significant
   }
   public static func ToUInt(_ x : Int64) -> UInt {
      return UInt(truncatingIfNeeded: x)
   }
   public static func ToUInt(_ x : Int) -> UInt {
      return UInt(bitPattern: x)
   }
   public static func ToUInt(_ x : UInt8) -> UInt {
      return UInt(x)
   }
   public static func ToUInt(_ x : UInt32) -> UInt {
      return UInt(x)
   }
   public static func ToUInt(_ x : UInt64) -> UInt {
      return UInt(truncatingIfNeeded: x)
   }
   public static func ToUInt(_ x : UInt) -> UInt {
      return x
   }
}

Voici un code de test:

   /// Exercises every `JDI` conversion with representative values: identity cases,
   /// truncation of out-of-range values, and sign-bit reinterpretation. Cases whose
   /// result depends on the platform word size are split with `#if (arch(i386) || arch(arm))`
   /// into 32-bit and 64-bit branches. Traps on the first failing `assert`.
   public func doTest() {

      // To Int8

      assert(JDI.ToInt8(42 as Int8) == 42)
      assert(JDI.ToInt8(-13 as Int8) == -13)

      assert(JDI.ToInt8(42 as Int32) == 42)
      assert(JDI.ToInt8(257 as Int32) == 1)

      assert(JDI.ToInt8(42 as Int64) == 42)
      assert(JDI.ToInt8(257 as Int64) == 1)

      assert(JDI.ToInt8(42 as Int) == 42)
      assert(JDI.ToInt8(257 as Int) == 1)

      assert(JDI.ToInt8(42 as UInt8) == 42)
      assert(JDI.ToInt8(0xf3 as UInt8) == -13)

      assert(JDI.ToInt8(42 as UInt32) == 42)
      assert(JDI.ToInt8(0xfffffff3 as UInt32) == -13)

      assert(JDI.ToInt8(42 as UInt64) == 42)
      assert(JDI.ToInt8(UInt64.max - 12) == -13)

      assert(JDI.ToInt8(42 as UInt) == 42)
      assert(JDI.ToInt8(UInt.max - 12) == -13)

      // To Int32

      assert(JDI.ToInt32(42 as Int8) == 42)
      assert(JDI.ToInt32(-13 as Int8) == -13)

      assert(JDI.ToInt32(42 as Int32) == 42)
      assert(JDI.ToInt32(-13 as Int32) == -13)

      assert(JDI.ToInt32(42 as Int64) == 42)
      assert(JDI.ToInt32(Int64(Int32.min) - 1) == Int32.max)

      assert(JDI.ToInt32(42 as Int) == 42)
      assert(JDI.ToInt32(-13 as Int) == -13)

      assert(JDI.ToInt32(42 as UInt8) == 42)
      assert(JDI.ToInt32(0xf3 as UInt8) == 243)

      assert(JDI.ToInt32(42 as UInt32) == 42)
      assert(JDI.ToInt32(0xfffffff3 as UInt32) == -13)

      assert(JDI.ToInt32(42 as UInt64) == 42)
      assert(JDI.ToInt32(UInt64.max - 12) == -13)

      assert(JDI.ToInt32(42 as UInt) == 42)
      assert(JDI.ToInt32(UInt.max - 12) == -13)

      // To Int64

      assert(JDI.ToInt64(42 as Int8) == 42)
      assert(JDI.ToInt64(-13 as Int8) == -13)

      assert(JDI.ToInt64(42 as Int32) == 42)
      assert(JDI.ToInt64(-13 as Int32) == -13)

      assert(JDI.ToInt64(42 as Int64) == 42)
      assert(JDI.ToInt64(-13 as Int64) == -13)

      assert(JDI.ToInt64(42 as Int) == 42)
      assert(JDI.ToInt64(-13 as Int) == -13)

      assert(JDI.ToInt64(42 as UInt8) == 42)
      assert(JDI.ToInt64(0xf3 as UInt8) == 243)

      assert(JDI.ToInt64(42 as UInt32) == 42)
      assert(JDI.ToInt64(0xfffffff3 as UInt32) == 4294967283)

      assert(JDI.ToInt64(42 as UInt64) == 42)
      assert(JDI.ToInt64(UInt64.max - 12) == -13)

      assert(JDI.ToInt64(42 as UInt) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToInt64(UInt.max - 12) == 4294967283)  // For 32-bit systems
      #else
         assert(JDI.ToInt64(UInt.max - 12) == -13)  // For 64-bit systems
      #endif

      // To Int

      assert(JDI.ToInt(42 as Int8) == 42)
      assert(JDI.ToInt(-13 as Int8) == -13)

      assert(JDI.ToInt(42 as Int32) == 42)
      assert(JDI.ToInt(-13 as Int32) == -13)

      assert(JDI.ToInt(42 as Int64) == 42)
      assert(JDI.ToInt(-13 as Int64) == -13)

      assert(JDI.ToInt(42 as Int) == 42)
      assert(JDI.ToInt(-13 as Int) == -13)

      assert(JDI.ToInt(42 as UInt8) == 42)
      assert(JDI.ToInt(0xf3 as UInt8) == 243)

      assert(JDI.ToInt(42 as UInt32) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToInt(0xfffffff3 as UInt32) == -13)  // For 32-bit systems
      #else
         assert(JDI.ToInt(0xfffffff3 as UInt32) == 4294967283)  // For 64-bit systems
      #endif

      assert(JDI.ToInt(42 as UInt64) == 42)
      assert(JDI.ToInt(UInt64.max - 12) == -13)

      assert(JDI.ToInt(42 as UInt) == 42)
      assert(JDI.ToInt(UInt.max - 12) == -13)

      // To UInt8

      assert(JDI.ToUInt8(42 as Int8) == 42)
      assert(JDI.ToUInt8(-13 as Int8) == 0xf3)

      assert(JDI.ToUInt8(42 as Int32) == 42)
      assert(JDI.ToUInt8(-13 as Int32) == 0xf3)

      assert(JDI.ToUInt8(42 as Int64) == 42)
      assert(JDI.ToUInt8(-13 as Int64) == 0xf3)
      assert(JDI.ToUInt8(Int64.max - 12) == 0xf3)

      assert(JDI.ToUInt8(42 as Int) == 42)
      assert(JDI.ToUInt8(-13 as Int) == 0xf3)
      assert(JDI.ToUInt8(Int.max - 12) == 0xf3)

      assert(JDI.ToUInt8(42 as UInt8) == 42)
      assert(JDI.ToUInt8(0xf3 as UInt8) == 0xf3)

      assert(JDI.ToUInt8(42 as UInt32) == 42)
      assert(JDI.ToUInt8(0xfffffff3 as UInt32) == 0xf3)

      assert(JDI.ToUInt8(42 as UInt64) == 42)
      assert(JDI.ToUInt8(UInt64.max - 12) == 0xf3)

      assert(JDI.ToUInt8(42 as UInt) == 42)
      assert(JDI.ToUInt8(UInt.max - 12) == 0xf3)

      // To UInt32

      assert(JDI.ToUInt32(42 as Int8) == 42)
      assert(JDI.ToUInt32(-13 as Int8) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as Int32) == 42)
      assert(JDI.ToUInt32(-13 as Int32) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as Int64) == 42)
      assert(JDI.ToUInt32(-13 as Int64) == 0xfffffff3)
      assert(JDI.ToUInt32(Int64.max - 12) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as Int) == 42)
      assert(JDI.ToUInt32(-13 as Int) == 0xfffffff3)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt32(Int.max - 12) == 0x7ffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt32(Int.max - 12) == 0xfffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt32(42 as UInt8) == 42)
      assert(JDI.ToUInt32(0xf3 as UInt8) == 0xf3)

      assert(JDI.ToUInt32(42 as UInt32) == 42)
      assert(JDI.ToUInt32(0xfffffff3 as UInt32) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as UInt64) == 42)
      assert(JDI.ToUInt32(UInt64.max - 12) == 0xfffffff3)

      assert(JDI.ToUInt32(42 as UInt) == 42)
      assert(JDI.ToUInt32(UInt.max - 12) == 0xfffffff3)

      // To UInt64

      assert(JDI.ToUInt64(42 as Int8) == 42)
      assert(JDI.ToUInt64(-13 as Int8) == 0xfffffffffffffff3)

      assert(JDI.ToUInt64(42 as Int32) == 42)
      assert(JDI.ToUInt64(-13 as Int32) == 0xfffffffffffffff3)

      assert(JDI.ToUInt64(42 as Int64) == 42)
      assert(JDI.ToUInt64(-13 as Int64) == 0xfffffffffffffff3)
      assert(JDI.ToUInt64(Int64.max - 12) == (UInt64.max >> 1) - 12)

      assert(JDI.ToUInt64(42 as Int) == 42)
      assert(JDI.ToUInt64(-13 as Int) == 0xfffffffffffffff3)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt64(Int.max - 12) == 0x7ffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt64(Int.max - 12) == 0x7ffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt64(42 as UInt8) == 42)
      assert(JDI.ToUInt64(0xf3 as UInt8) == 0xf3)

      assert(JDI.ToUInt64(42 as UInt32) == 42)
      assert(JDI.ToUInt64(0xfffffff3 as UInt32) == 0xfffffff3)

      assert(JDI.ToUInt64(42 as UInt64) == 42)
      assert(JDI.ToUInt64(UInt64.max - 12) == 0xfffffffffffffff3)

      assert(JDI.ToUInt64(42 as UInt) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt64(UInt.max - 12) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt64(UInt.max - 12) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      // To UInt

      assert(JDI.ToUInt(42 as Int8) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(-13 as Int8) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(-13 as Int8) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt(42 as Int32) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(-13 as Int32) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(-13 as Int32) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt(42 as Int64) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(-13 as Int64) == 0xfffffff3)  // For 32-bit systems
         assert(JDI.ToUInt(Int64.max - 12) == 0xfffffff3)
      #else
         assert(JDI.ToUInt(-13 as Int64) == 0xfffffffffffffff3)  // For 64-bit systems
         assert(JDI.ToUInt(Int64.max - 12) == 0x7ffffffffffffff3)
      #endif

      assert(JDI.ToUInt(42 as Int) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(Int.max - 12) == 0x7ffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(Int.max - 12) == 0x7ffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt(42 as UInt8) == 42)
      assert(JDI.ToUInt(0xf3 as UInt8) == 0xf3)

      assert(JDI.ToUInt(42 as UInt32) == 42)
      assert(JDI.ToUInt(0xfffffff3 as UInt32) == 0xfffffff3)

      assert(JDI.ToUInt(42 as UInt64) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(UInt64.max - 12) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(UInt64.max - 12) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      assert(JDI.ToUInt(42 as UInt) == 42)
      #if (arch(i386) || arch(arm))
         assert(JDI.ToUInt(UInt.max - 12) == 0xfffffff3)  // For 32-bit systems
      #else
         assert(JDI.ToUInt(UInt.max - 12) == 0xfffffffffffffff3)  // For 64-bit systems
      #endif

      print("\nTesting JDI complete.\n")
   }

1
2018-01-30 08:13